ngram
listlengths
0
67.8k
[ "= np.array(data) data[data == ''] = np.nan data = data.astype(float) dataDict = {}", "correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating", "0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self,", "'/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400,", "= self.importVISwavelengths() else: print('No valid reference for wavelength correction!') def importIRwavelengths(self): filename =", "1000: self.correction_spectrum = self.ImportIR() elif wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths) else:", "delimiter=',') headers = list(filter(None, next(reader))) data = [] for row in reader: data.append(row[:-1])", "print('No valid reference for wavelength correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict =", "data = np.array(data) data[data == ''] = np.nan data = data.astype(float) dataDict =", "d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating,", "= grating self.center_wavelength = center_wavelength if self.grating in (1250, 1600, 2000): self.wavelength =", "= dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths)", "valid reference for wavelength correction!') def importIRwavelengths(self): filename = 
'/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename)", "grating, center_wavelength, wavelengths = None): self.grating = grating self.center_wavelength = center_wavelength if grating", "import numpy as np import re import Spectrum #import matplotlib.pyplot as plt def", "self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv'", "from __future__ import print_function import csv import numpy as np import re import", "correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength]", ">= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths)", "= ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def", "open(filename) as csvfile: reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data =", "print(columns_per_data) for hh in headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:,", "importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum", "#import matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename) as csvfile: reader = csv.reader(csvfile,", "np.nan data = data.astype(float) dataDict = {} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0])", 
"Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename)", "correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength]", "dataDict = {} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in", "= int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label]", "filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def", "SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths = None): self.grating = grating self.center_wavelength =", "ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self,", "= dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980) #p = SystemCorrectionFactor(800, 750,", "ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp =", "def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp", "dataDict = ReadCSVRef(filename) wavelengths = 
dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980)", "ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict", "center_wavelength): self.grating = grating self.center_wavelength = center_wavelength if self.grating in (1250, 1600, 2000):", "def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return", "= dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename", "reference for wavelength correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum", "valid reference for system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename)", "= data.astype(float) dataDict = {} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for", "data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return dataDict # Add error-checking for entering", "= dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename)", "elif self.grating in (500, 800): self.wavelength = self.importVISwavelengths() else: print('No valid reference for", "def 
importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return", "columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label = tuple(map(int, re.findall(r'\\d+', hh)))", "for system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d =", "import re import Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename) as", "dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum", "2000): self.wavelength = self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength = self.importVISwavelengths() else:", "reference for system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d", "print('No valid reference for system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict =", "ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980) #p =", "== ''] = np.nan data = data.astype(float) dataDict = {} i = 0", "elif wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference for", "= '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0],", "self.grating = grating 
self.center_wavelength = center_wavelength if grating >= 1000: self.correction_spectrum = self.ImportIR()", "= self.ImportIR() elif wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid", "self.correction_spectrum = self.ImportIR() elif wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No", "'/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename", "error-checking for entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength,", "0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label = tuple(map(int, re.findall(r'\\d+',", "dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1])", "np.array(data) data[data == ''] = np.nan data = data.astype(float) dataDict = {} i", "i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label =", "in (500, 800): self.wavelength = self.importVISwavelengths() else: print('No valid reference for wavelength correction!')", "[] for row in reader: data.append(row[:-1]) data = np.array(data) data[data == ''] =", "WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating = grating self.center_wavelength = center_wavelength if self.grating", "import Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename) as csvfile: reader", "return correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) 
d =", "= ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv'", "wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths,", "__init__(self, grating, center_wavelength): self.grating = grating self.center_wavelength = center_wavelength if self.grating in (1250,", "grating >= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths is not None: self.correction_spectrum =", "{} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label", "self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980) #p = SystemCorrectionFactor(800, 750, wvls) #print(np.shape(p.correction_spectrum.SpectrumRange))", "csvfile: reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data = [] for", "= 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label = tuple(map(int,", "grating self.center_wavelength = center_wavelength if self.grating in (1250, 1600, 2000): self.wavelength = self.importIRwavelengths()", "return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating,", "#wvls = np.linspace(400, 980) #p = SystemCorrectionFactor(800, 750, wvls) #print(np.shape(p.correction_spectrum.SpectrumRange)) #plt.plot(p.correction_spectrum.SpectrumRange, p.correction_spectrum.intensity) #plt.show()", "self.center_wavelength = center_wavelength if self.grating in (1250, 1600, 2000): 
self.wavelength = self.importIRwavelengths() elif", "(1250, 1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength =", "None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference for system correction!') def ImportIR(self):", "data = [] for row in reader: data.append(row[:-1]) data = np.array(data) data[data ==", "# Add error-checking for entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self,", "self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference for system correction!') def ImportIR(self): filename", "= self.ImportVis(wavelengths) else: print('No valid reference for system correction!') def ImportIR(self): filename =", "def __init__(self, grating, center_wavelength): self.grating = grating self.center_wavelength = center_wavelength if self.grating in", "= Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict =", "ReadCSVRef(filename): with open(filename) as csvfile: reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader)))", "= {} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers:", "ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum =", "correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict =", "if self.grating in (1250, 1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating in (500,", "matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename) as 
csvfile: reader = csv.reader(csvfile, delimiter=',')", "headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+=", "grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths = None): self.grating =", "return dataDict # Add error-checking for entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object):", "correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating = grating self.center_wavelength = center_wavelength", "= np.nan data = data.astype(float) dataDict = {} i = 0 columns_per_data =", "def ReadCSVRef(filename): with open(filename) as csvfile: reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None,", "dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths", "self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength = self.importVISwavelengths() else: print('No valid reference", "label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data", "row in reader: data.append(row[:-1]) data = np.array(data) data[data == ''] = np.nan data", "in headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data]", "def __init__(self, grating, center_wavelength, wavelengths = None): self.grating = grating self.center_wavelength = center_wavelength", "ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] 
correction_spectrum =", "np import re import Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename)", "in reader: data.append(row[:-1]) data = np.array(data) data[data == ''] = np.nan data =", "a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths = None):", "= '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths #wvls =", "grating self.center_wavelength = center_wavelength if grating >= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths", "= list(filter(None, next(reader))) data = [] for row in reader: data.append(row[:-1]) data =", "filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths #wvls", "for wavelength correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum =", "class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths = None): self.grating = grating self.center_wavelength", "''] = np.nan data = data.astype(float) dataDict = {} i = 0 columns_per_data", "in (1250, 1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength", "__init__(self, grating, center_wavelength, wavelengths = None): self.grating = grating self.center_wavelength = center_wavelength if", "for entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths", "None): self.grating = grating self.center_wavelength = center_wavelength if grating >= 
1000: self.correction_spectrum =", "filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1],", "as plt def ReadCSVRef(filename): with open(filename) as csvfile: reader = csv.reader(csvfile, delimiter=',') headers", "= None): self.grating = grating self.center_wavelength = center_wavelength if grating >= 1000: self.correction_spectrum", "Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename) as csvfile: reader =", "self.ImportVis(wavelengths) else: print('No valid reference for system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv'", "dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename =", "1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength = self.importVISwavelengths()", "= '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self):", "dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return dataDict # Add error-checking", "self.grating = grating self.center_wavelength = center_wavelength if self.grating in (1250, 1600, 2000): self.wavelength", "reader: data.append(row[:-1]) data = np.array(data) data[data == ''] = np.nan data = data.astype(float)", "import print_function import csv import numpy as np import re import Spectrum #import", "return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating, 
center_wavelength): self.grating = grating self.center_wavelength =", "self.grating in (1250, 1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating in (500, 800):", "hh in headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:,", "else: print('No valid reference for wavelength correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict", "self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths =", "i+= columns_per_data return dataDict # Add error-checking for entering a non-existent grating/wavelength pair", "d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp,", "data[data == ''] = np.nan data = data.astype(float) dataDict = {} i =", "correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict", "self.center_wavelength = center_wavelength if grating >= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths is", "else: print('No valid reference for system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict", "= np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class", "'/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] 
correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return", "= Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating =", "wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980) #p = SystemCorrectionFactor(800,", "re import Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename): with open(filename) as csvfile:", "if grating >= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths is not None: self.correction_spectrum", "__future__ import print_function import csv import numpy as np import re import Spectrum", "= [] for row in reader: data.append(row[:-1]) data = np.array(data) data[data == '']", "import csv import numpy as np import re import Spectrum #import matplotlib.pyplot as", "self.importVISwavelengths() else: print('No valid reference for wavelength correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv'", "= grating self.center_wavelength = center_wavelength if grating >= 1000: self.correction_spectrum = self.ImportIR() elif", "= ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum", "= tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return", "d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def", "wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating = grating self.center_wavelength", "Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class 
WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating = grating", "center_wavelength, wavelengths = None): self.grating = grating self.center_wavelength = center_wavelength if grating >=", "correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)]", "wavelength correction!') def importIRwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv' dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating,", "reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data = [] for row", "importVISwavelengths(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv' dataDict = ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths", "grating, center_wavelength): self.grating = grating self.center_wavelength = center_wavelength if self.grating in (1250, 1600,", "= self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength = self.importVISwavelengths() else: print('No valid", "800): self.wavelength = self.importVISwavelengths() else: print('No valid reference for wavelength correction!') def importIRwavelengths(self):", "correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength]", "= data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return dataDict # Add error-checking for", "self.wavelength = self.importIRwavelengths() elif self.grating in (500, 800): self.wavelength = self.importVISwavelengths() else: print('No", "1]) correction_spectrum = 
Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength):", "is not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference for system correction!')", "spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum", "class WavelengthCorrectionFactor(object): def __init__(self, grating, center_wavelength): self.grating = grating self.center_wavelength = center_wavelength if", "not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference for system correction!') def", "as csvfile: reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data = []", "wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference for system", "hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return dataDict # Add", "filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:,", "= csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data = [] for row in", "data[:, i:i+columns_per_data] i+= columns_per_data return dataDict # Add error-checking for entering a non-existent", "data = data.astype(float) dataDict = {} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data)", "wavelengths #wvls = np.linspace(400, 980) #p = SystemCorrectionFactor(800, 750, wvls) #print(np.shape(p.correction_spectrum.SpectrumRange)) #plt.plot(p.correction_spectrum.SpectrumRange, p.correction_spectrum.intensity)", "plt def ReadCSVRef(filename): with open(filename) as csvfile: reader = 
csv.reader(csvfile, delimiter=',') headers =", "for hh in headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data]", "= ReadCSVRef(filename) wavelengths = dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980) #p", "np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return correction_spectrum class WavelengthCorrectionFactor(object):", "self.ImportIR() elif wavelengths is not None: self.correction_spectrum = self.ImportVis(wavelengths) else: print('No valid reference", "tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return dataDict", "numpy as np import re import Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename):", "pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths = None): self.grating = grating", "csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data = [] for row in reader:", "(500, 800): self.wavelength = self.importVISwavelengths() else: print('No valid reference for wavelength correction!') def", "d[:,0]) return correction_spectrum def ImportVis(self, wavelengths): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d", "dataDict # Add error-checking for entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def", "'/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv' dataDict = ReadCSVRef(filename) d = dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:,", "print_function import csv import numpy as np import re import Spectrum #import matplotlib.pyplot", "re.findall(r'\\d+', hh))) dataDict[label] = data[:, i:i+columns_per_data] 
data[:, i:i+columns_per_data] i+= columns_per_data return dataDict #", "self.grating in (500, 800): self.wavelength = self.importVISwavelengths() else: print('No valid reference for wavelength", "columns_per_data return dataDict # Add error-checking for entering a non-existent grating/wavelength pair class", "for row in reader: data.append(row[:-1]) data = np.array(data) data[data == ''] = np.nan", "self.wavelength = self.importVISwavelengths() else: print('No valid reference for wavelength correction!') def importIRwavelengths(self): filename", "data.astype(float) dataDict = {} i = 0 columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh", "headers = list(filter(None, next(reader))) data = [] for row in reader: data.append(row[:-1]) data", "i:i+columns_per_data] data[:, i:i+columns_per_data] i+= columns_per_data return dataDict # Add error-checking for entering a", "center_wavelength if grating >= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths is not None:", "= '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0])", "return wavelengths #wvls = np.linspace(400, 980) #p = SystemCorrectionFactor(800, 750, wvls) #print(np.shape(p.correction_spectrum.SpectrumRange)) #plt.plot(p.correction_spectrum.SpectrumRange,", "d = dataDict[self.grating, self.center_wavelength] correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0]) return correction_spectrum def ImportVis(self, wavelengths):", "Add error-checking for entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating,", "as np import re import Spectrum #import matplotlib.pyplot as plt def ReadCSVRef(filename): with", "dataDict[self.grating, self.center_wavelength] return wavelengths #wvls = np.linspace(400, 980) #p = 
SystemCorrectionFactor(800, 750, wvls)", "list(filter(None, next(reader))) data = [] for row in reader: data.append(row[:-1]) data = np.array(data)", "system correction!') def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating,", "csv import numpy as np import re import Spectrum #import matplotlib.pyplot as plt", "next(reader))) data = [] for row in reader: data.append(row[:-1]) data = np.array(data) data[data", "data.append(row[:-1]) data = np.array(data) data[data == ''] = np.nan data = data.astype(float) dataDict", "with open(filename) as csvfile: reader = csv.reader(csvfile, delimiter=',') headers = list(filter(None, next(reader))) data", "i:i+columns_per_data] i+= columns_per_data return dataDict # Add error-checking for entering a non-existent grating/wavelength", "center_wavelength if self.grating in (1250, 1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating in", "dataDict = ReadCSVRef(filename) correction_spectrum = dataDict[self.grating, self.center_wavelength] return correction_spectrum def importVISwavelengths(self): filename =", "def ImportIR(self): filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv' dataDict = ReadCSVRef(filename) d = dataDict[self.grating, self.center_wavelength] correction_spectrum", "entering a non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths =", "= center_wavelength if grating >= 1000: self.correction_spectrum = self.ImportIR() elif wavelengths is not", "dataDict[(self.grating,)] spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1]) correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths) return", "wavelengths = None): self.grating = grating self.center_wavelength = center_wavelength if grating >= 1000:", "= 
center_wavelength if self.grating in (1250, 1600, 2000): self.wavelength = self.importIRwavelengths() elif self.grating", "int(np.shape(data[0])[0]/np.shape(headers)[0]) print(columns_per_data) for hh in headers: label = tuple(map(int, re.findall(r'\\d+', hh))) dataDict[label] =", "non-existent grating/wavelength pair class SystemCorrectionFactor(object): def __init__(self, grating, center_wavelength, wavelengths = None): self.grating" ]
[ "False # Token was successfully validated return True def generate_token(email=None): \"\"\" Returns a", "the registered email address.\\n Keyword Arguments: token -- Token passed in the user's", "not isinstance(email, str) or len(email) == 0: print(\"Error: Invalid Email address passed\") return", "def validate_token(token=None): \"\"\"Helps in confirming the Email Address with the help of the", "def generate_token(email=None): \"\"\" Returns a token for the purpose of email verification.\\n Keyword", "registered email address.\\n Keyword Arguments: token -- Token passed in the user's email", "token is to be generated \"\"\" if not isinstance(email, str) or len(email) ==", "SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set in 'import.json' file\") def", "MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set in 'import.json' file\") #", "is valid for 1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else:", "sent on the registered email address.\\n Keyword Arguments: token -- Token passed in", "max_age=MAX_TIME) except SignatureExpired: return False # Token was successfully validated return True def", "== 0: print(\"Error: Invalid Email address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT)", "is to be generated \"\"\" if not isinstance(email, str) or len(email) == 0:", "SignatureExpired: return False # Token was successfully validated return True def generate_token(email=None): \"\"\"", "!= 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set in 'import.json'", "len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set in", "Arguments: 
token -- Token passed in the user's email \"\"\" try: res =", "else: raise Exception(\"Property 'email_verification_salt' not set in 'import.json' file\") # Secret Key if", "token -- Token passed in the user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token,", "if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set", "\"\"\" Returns a token for the purpose of email verification.\\n Keyword Arguments email", "address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return token for the", "import load config = None with open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"]", "load config = None with open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"] #", "config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set in 'import.json' file\") # Secret Key", "from itsdangerous import URLSafeTimedSerializer, SignatureExpired from json import load config = None with", "Key if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not", "len(email) == 0: print(\"Error: Invalid Email address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email,", "with open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"] # Token is valid for", "!= 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set in 'import.json'", "f: config = load(f)[\"jsondata\"] # Token is valid for 1 day if len(config[\"email_verification_timeout\"])", "0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set in 'import.json' file\")", "set in 'import.json' file\") # Salt if 
len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"]", "return True def generate_token(email=None): \"\"\" Returns a token for the purpose of email", "raise Exception(\"Property 'email_verification_secret' not set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming", "None with open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"] # Token is valid", "len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set in", "a token for the purpose of email verification.\\n Keyword Arguments email -- Email", "config = None with open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"] # Token", "itsdangerous import URLSafeTimedSerializer, SignatureExpired from json import load config = None with open(\"import.json\",", "address for which the token is to be generated \"\"\" if not isinstance(email,", "on the registered email address.\\n Keyword Arguments: token -- Token passed in the", "not set in 'import.json' file\") # Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET", "day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not", "import URLSafeTimedSerializer, SignatureExpired from json import load config = None with open(\"import.json\", \"r\")", "<reponame>Aayush-hub/Bulk-Mailer<gh_stars>0 from itsdangerous import URLSafeTimedSerializer, SignatureExpired from json import load config = None", "in the user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired:", "email verification.\\n Keyword Arguments email -- Email address for which the token is", "be generated \"\"\" if not isinstance(email, str) or len(email) == 0: print(\"Error: 
Invalid", "\"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False # Token", "\"\"\" if not isinstance(email, str) or len(email) == 0: print(\"Error: Invalid Email address", "return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return token for the email return", "'import.json' file\") # Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else:", "email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False #", "file\") def validate_token(token=None): \"\"\"Helps in confirming the Email Address with the help of", "of email verification.\\n Keyword Arguments email -- Email address for which the token", "the purpose of email verification.\\n Keyword Arguments email -- Email address for which", "Token is valid for 1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"])", "of the token, sent on the registered email address.\\n Keyword Arguments: token --", "for the purpose of email verification.\\n Keyword Arguments email -- Email address for", "Exception(\"Property 'email_verification_secret' not set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming the", "passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return token for the email", "VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set in 'import.json' file\") #", "= None with open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"] # Token is", "int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set in 'import.json' file\") # Salt if", "else: raise Exception(\"Property 
'email_verification_timeout' not set in 'import.json' file\") # Salt if len(config[\"email_verification_timeout\"])", "in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming the Email Address with the", "0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set in 'import.json' file\")", "!= 0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set in 'import.json'", "confirming the Email Address with the help of the token, sent on the", "# Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt'", "Email address for which the token is to be generated \"\"\" if not", "Invalid Email address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return token", "the Email Address with the help of the token, sent on the registered", "the help of the token, sent on the registered email address.\\n Keyword Arguments:", "the token, sent on the registered email address.\\n Keyword Arguments: token -- Token", "generate_token(email=None): \"\"\" Returns a token for the purpose of email verification.\\n Keyword Arguments", "Exception(\"Property 'email_verification_timeout' not set in 'import.json' file\") # Salt if len(config[\"email_verification_timeout\"]) != 0:", "'email_verification_secret' not set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming the Email", "-- Token passed in the user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT,", "raise Exception(\"Property 'email_verification_timeout' not set in 'import.json' file\") # Salt if len(config[\"email_verification_timeout\"]) !=", "True def generate_token(email=None): \"\"\" Returns a token for the purpose of email 
verification.\\n", "set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming the Email Address with", "help of the token, sent on the registered email address.\\n Keyword Arguments: token", "Email Address with the help of the token, sent on the registered email", "if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set", "Token was successfully validated return True def generate_token(email=None): \"\"\" Returns a token for", "None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return token for the email return token", "# Token is valid for 1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME =", "purpose of email verification.\\n Keyword Arguments email -- Email address for which the", "= URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False # Token was successfully validated", "for 1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property", "for which the token is to be generated \"\"\" if not isinstance(email, str)", "token for the purpose of email verification.\\n Keyword Arguments email -- Email address", "'email_verification_timeout' not set in 'import.json' file\") # Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT", "Address with the help of the token, sent on the registered email address.\\n", "Exception(\"Property 'email_verification_salt' not set in 'import.json' file\") # Secret Key if len(config[\"email_verification_timeout\"]) !=", "email address.\\n Keyword Arguments: token -- Token passed in the user's email \"\"\"", "Returns a token for the purpose of email verification.\\n Keyword Arguments email --", "which the token is to be generated \"\"\" if not isinstance(email, str) or", 
"email -- Email address for which the token is to be generated \"\"\"", "0: print(\"Error: Invalid Email address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ##", "-- Email address for which the token is to be generated \"\"\" if", "validate_token(token=None): \"\"\"Helps in confirming the Email Address with the help of the token,", "= config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set in 'import.json' file\") def validate_token(token=None):", "# Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property", "to be generated \"\"\" if not isinstance(email, str) or len(email) == 0: print(\"Error:", "address.\\n Keyword Arguments: token -- Token passed in the user's email \"\"\" try:", "except SignatureExpired: return False # Token was successfully validated return True def generate_token(email=None):", "not set in 'import.json' file\") # Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT =", "passed in the user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except", "in confirming the Email Address with the help of the token, sent on", "user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False", "with the help of the token, sent on the registered email address.\\n Keyword", "isinstance(email, str) or len(email) == 0: print(\"Error: Invalid Email address passed\") return None", "Keyword Arguments email -- Email address for which the token is to be", "the user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return", "the token is to be generated \"\"\" if not isinstance(email, str) or len(email)", "res = 
URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False # Token was successfully", "json import load config = None with open(\"import.json\", \"r\") as f: config =", "successfully validated return True def generate_token(email=None): \"\"\" Returns a token for the purpose", "generated \"\"\" if not isinstance(email, str) or len(email) == 0: print(\"Error: Invalid Email", "'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming the Email Address with the help", "token, sent on the registered email address.\\n Keyword Arguments: token -- Token passed", "URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False # Token was successfully validated return", "Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not", "was successfully validated return True def generate_token(email=None): \"\"\" Returns a token for the", "str) or len(email) == 0: print(\"Error: Invalid Email address passed\") return None token", "as f: config = load(f)[\"jsondata\"] # Token is valid for 1 day if", "file\") # Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else: raise", "SignatureExpired from json import load config = None with open(\"import.json\", \"r\") as f:", "Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret'", "1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout'", "try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False # Token 
was", "if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set", "# Token was successfully validated return True def generate_token(email=None): \"\"\" Returns a token", "'email_verification_salt' not set in 'import.json' file\") # Secret Key if len(config[\"email_verification_timeout\"]) != 0:", "config[\"email_verification_secret\"] else: raise Exception(\"Property 'email_verification_secret' not set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps", "Keyword Arguments: token -- Token passed in the user's email \"\"\" try: res", "open(\"import.json\", \"r\") as f: config = load(f)[\"jsondata\"] # Token is valid for 1", "= load(f)[\"jsondata\"] # Token is valid for 1 day if len(config[\"email_verification_timeout\"]) != 0:", "Email address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return token for", "else: raise Exception(\"Property 'email_verification_secret' not set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in", "\"\"\"Helps in confirming the Email Address with the help of the token, sent", "'import.json' file\") # Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise", "verification.\\n Keyword Arguments email -- Email address for which the token is to", "file\") # Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else: raise Exception(\"Property", "not set in 'import.json' file\") def validate_token(token=None): \"\"\"Helps in confirming the Email Address", "Arguments email -- Email address for which the token is to be generated", "print(\"Error: Invalid Email address passed\") return None token = URLSafeTimedSerializer(SECRET).dumps(email, salt=VERIFICATION_SALT) ## Return", "set in 'import.json' 
file\") # Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET =", "load(f)[\"jsondata\"] # Token is valid for 1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME", "raise Exception(\"Property 'email_verification_salt' not set in 'import.json' file\") # Secret Key if len(config[\"email_verification_timeout\"])", "valid for 1 day if len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise", "in 'import.json' file\") # Salt if len(config[\"email_verification_timeout\"]) != 0: VERIFICATION_SALT = config[\"email_verification_salt\"] else:", "0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set in 'import.json' file\")", "len(config[\"email_verification_timeout\"]) != 0: MAX_TIME = int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set in", "from json import load config = None with open(\"import.json\", \"r\") as f: config", "salt=VERIFICATION_SALT, max_age=MAX_TIME) except SignatureExpired: return False # Token was successfully validated return True", "= config[\"email_verification_salt\"] else: raise Exception(\"Property 'email_verification_salt' not set in 'import.json' file\") # Secret", "return False # Token was successfully validated return True def generate_token(email=None): \"\"\" Returns", "config = load(f)[\"jsondata\"] # Token is valid for 1 day if len(config[\"email_verification_timeout\"]) !=", "or len(email) == 0: print(\"Error: Invalid Email address passed\") return None token =", "if not isinstance(email, str) or len(email) == 0: print(\"Error: Invalid Email address passed\")", "in 'import.json' file\") # Secret Key if len(config[\"email_verification_timeout\"]) != 0: SECRET = config[\"email_verification_secret\"]", "validated return True def generate_token(email=None): \"\"\" Returns a token for the purpose of", "Token passed in 
the user's email \"\"\" try: res = URLSafeTimedSerializer(SECRET).loads(token, salt=VERIFICATION_SALT, max_age=MAX_TIME)", "URLSafeTimedSerializer, SignatureExpired from json import load config = None with open(\"import.json\", \"r\") as", "= int(config[\"email_verification_timeout\"]) else: raise Exception(\"Property 'email_verification_timeout' not set in 'import.json' file\") # Salt", "\"r\") as f: config = load(f)[\"jsondata\"] # Token is valid for 1 day" ]
[ "/) \"\"\" html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add classes", "for the evennia_wiki. This pre-loads some common extensions and allows some inner processing.", ". We assume an absolute # URL (/) means a wiki page. for", "MarkdownEngine(Markdown): \"\"\"A special markdown engine for the evennia_wiki. This pre-loads some common extensions", "evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special", "an absolute # URL (/) means a wiki page. for tag in soup.find_all(\"a\"):", "used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown):", "in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" # Add classes to table headers", "for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link location of pointing", "some classes. 1. Table elements will have classes table table-responsive table-striped 2. Table", "generic markdown engine used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown import", "import BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine for", "'html.parser') # Add classes to tables for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table", "self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the text to", "in soup.find_all(\"a\"): href = tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" +", "tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" + href return str(soup) ENGINE", "thead-inverse 3. Links elements will be re-mapped if absolute (beginning by /) \"\"\"", "and allows some inner processing. 
\"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables',", "elements will be re-mapped if absolute (beginning by /) \"\"\" html = super(MarkdownEngine,", "absolute (beginning by /) \"\"\" html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser')", "headers for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link location of", "if absolute (beginning by /) \"\"\" html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html,", "= \"thead-inverse\" # Change link location of pointing to /* . We assume", "the generic markdown engine used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown", "to HTML, changing some classes. 1. Table elements will have classes table table-responsive", "engine used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown import Markdown class", "location of pointing to /* . We assume an absolute # URL (/)", "convert(self, text): \"\"\"Convert the text to HTML, changing some classes. 1. Table elements", "This pre-loads some common extensions and allows some inner processing. \"\"\" def __init__(self):", "elements will have classes table table-responsive table-striped 2. Table headers will have the", "will be re-mapped if absolute (beginning by /) \"\"\" html = super(MarkdownEngine, self).convert(text)", "will have the class thead-inverse 3. Links elements will be re-mapped if absolute", "table-striped 2. Table headers will have the class thead-inverse 3. Links elements will", "have classes table table-responsive table-striped 2. Table headers will have the class thead-inverse", "inner processing. 
\"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def", "table headers for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link location", "by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A", "soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link location of pointing to /* .", "# Change link location of pointing to /* . We assume an absolute", "super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the text", "special markdown engine for the evennia_wiki. This pre-loads some common extensions and allows", "= super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add classes to tables for", "tag[\"class\"] = \"table table-responsive table-striped\" # Add classes to table headers for tag", "table-striped\" # Add classes to table headers for tag in soup.find_all(\"thead\"): tag[\"class\"] =", "of pointing to /* . We assume an absolute # URL (/) means", "]) def convert(self, text): \"\"\"Convert the text to HTML, changing some classes. 1.", "class MarkdownEngine(Markdown): \"\"\"A special markdown engine for the evennia_wiki. This pre-loads some common", "some inner processing. \"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ])", "2. Table headers will have the class thead-inverse 3. 
Links elements will be", "classes to table headers for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change", "from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine for the evennia_wiki.", "means a wiki page. for tag in soup.find_all(\"a\"): href = tag.get(\"href\") if href", "'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the text to HTML, changing", "= BeautifulSoup(html, 'html.parser') # Add classes to tables for tag in soup.find_all(\"table\"): tag[\"class\"]", "\"thead-inverse\" # Change link location of pointing to /* . We assume an", "allows some inner processing. \"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc',", "BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine for the", "extensions and allows some inner processing. \"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes',", "processing. \"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self,", "/* . 
We assume an absolute # URL (/) means a wiki page.", "to table headers for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link", "soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" # Add classes to table headers for", "bs4 import BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine", "= \"table table-responsive table-striped\" # Add classes to table headers for tag in", "soup.find_all(\"a\"): href = tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" + href", "Links elements will be re-mapped if absolute (beginning by /) \"\"\" html =", "page. for tag in soup.find_all(\"a\"): href = tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"]", "'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the text to HTML, changing some", "be re-mapped if absolute (beginning by /) \"\"\" html = super(MarkdownEngine, self).convert(text) soup", "1. Table elements will have classes table table-responsive table-striped 2. Table headers will", "for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" # Add classes to", "table-responsive table-striped 2. Table headers will have the class thead-inverse 3. Links elements", "by /) \"\"\" html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add", "\"\"\" html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add classes to", "a wiki page. for tag in soup.find_all(\"a\"): href = tag.get(\"href\") if href and", "\"\"\"A special markdown engine for the evennia_wiki. This pre-loads some common extensions and", "class thead-inverse 3. Links elements will be re-mapped if absolute (beginning by /)", "pointing to /* . We assume an absolute # URL (/) means a", "will have classes table table-responsive table-striped 2. 
Table headers will have the class", "Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine for the evennia_wiki. This pre-loads some", "soup = BeautifulSoup(html, 'html.parser') # Add classes to tables for tag in soup.find_all(\"table\"):", "Change link location of pointing to /* . We assume an absolute #", "classes. 1. Table elements will have classes table table-responsive table-striped 2. Table headers", "BeautifulSoup(html, 'html.parser') # Add classes to tables for tag in soup.find_all(\"table\"): tag[\"class\"] =", "have the class thead-inverse 3. Links elements will be re-mapped if absolute (beginning", "to tables for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" # Add", "tag in soup.find_all(\"a\"): href = tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\"", "Table elements will have classes table table-responsive table-striped 2. Table headers will have", "\"\"\"Class containing the generic markdown engine used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup", "'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the text to HTML, changing some classes.", "for tag in soup.find_all(\"a\"): href = tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] =", "classes table table-responsive table-striped 2. Table headers will have the class thead-inverse 3.", "\"table table-responsive table-striped\" # Add classes to table headers for tag in soup.find_all(\"thead\"):", "'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the text to HTML,", "markdown engine for the evennia_wiki. This pre-loads some common extensions and allows some", "self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add classes to tables for tag in", "tag[\"class\"] = \"thead-inverse\" # Change link location of pointing to /* . 
We", "markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine for the evennia_wiki. This", "URL (/) means a wiki page. for tag in soup.find_all(\"a\"): href = tag.get(\"href\")", "wiki page. for tag in soup.find_all(\"a\"): href = tag.get(\"href\") if href and href.startswith(\"/\"):", "the class thead-inverse 3. Links elements will be re-mapped if absolute (beginning by", "def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert", "__init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text): \"\"\"Convert the", "the text to HTML, changing some classes. 1. Table elements will have classes", "= tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" + href return str(soup)", "pre-loads some common extensions and allows some inner processing. \"\"\" def __init__(self): super(MarkdownEngine,", "re-mapped if absolute (beginning by /) \"\"\" html = super(MarkdownEngine, self).convert(text) soup =", "assume an absolute # URL (/) means a wiki page. for tag in", "evennia_wiki. This pre-loads some common extensions and allows some inner processing. \"\"\" def", "def convert(self, text): \"\"\"Convert the text to HTML, changing some classes. 1. Table", "Table headers will have the class thead-inverse 3. Links elements will be re-mapped", "to /* . 
We assume an absolute # URL (/) means a wiki", "table-responsive table-striped\" # Add classes to table headers for tag in soup.find_all(\"thead\"): tag[\"class\"]", "in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link location of pointing to /*", "# Add classes to table headers for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\"", "3. Links elements will be re-mapped if absolute (beginning by /) \"\"\" html", "(beginning by /) \"\"\" html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') #", "from bs4 import BeautifulSoup from markdown import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown", "absolute # URL (/) means a wiki page. for tag in soup.find_all(\"a\"): href", "href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" + href return str(soup) ENGINE = MarkdownEngine()", "text): \"\"\"Convert the text to HTML, changing some classes. 1. Table elements will", "# Add classes to tables for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive", "containing the generic markdown engine used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from", "text to HTML, changing some classes. 1. Table elements will have classes table", "Add classes to table headers for tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" #", "markdown engine used by evenniq_wiki.\"\"\" from bs4 import BeautifulSoup from markdown import Markdown", "tag in soup.find_all(\"thead\"): tag[\"class\"] = \"thead-inverse\" # Change link location of pointing to", "link location of pointing to /* . 
We assume an absolute # URL", "Add classes to tables for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\"", "if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" + href return str(soup) ENGINE =", "super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add classes to tables for tag", "import Markdown class MarkdownEngine(Markdown): \"\"\"A special markdown engine for the evennia_wiki. This pre-loads", "headers will have the class thead-inverse 3. Links elements will be re-mapped if", "html = super(MarkdownEngine, self).convert(text) soup = BeautifulSoup(html, 'html.parser') # Add classes to tables", "We assume an absolute # URL (/) means a wiki page. for tag", "some common extensions and allows some inner processing. \"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[", "# URL (/) means a wiki page. for tag in soup.find_all(\"a\"): href =", "engine for the evennia_wiki. This pre-loads some common extensions and allows some inner", "classes to tables for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" #", "changing some classes. 1. Table elements will have classes table table-responsive table-striped 2.", "the evennia_wiki. This pre-loads some common extensions and allows some inner processing. \"\"\"", "common extensions and allows some inner processing. \"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code',", "tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" # Add classes to table", "\"\"\" def __init__(self): super(MarkdownEngine, self).__init__(extensions=[ 'markdown.extensions.fenced_code', 'markdown.extensions.footnotes', 'markdown.extensions.tables', 'markdown.extensions.toc', ]) def convert(self, text):", "(/) means a wiki page. 
for tag in soup.find_all(\"a\"): href = tag.get(\"href\") if", "table table-responsive table-striped 2. Table headers will have the class thead-inverse 3. Links", "tables for tag in soup.find_all(\"table\"): tag[\"class\"] = \"table table-responsive table-striped\" # Add classes", "\"\"\"Convert the text to HTML, changing some classes. 1. Table elements will have", "href = tag.get(\"href\") if href and href.startswith(\"/\"): tag[\"href\"] = \"/wiki\" + href return", "HTML, changing some classes. 1. Table elements will have classes table table-responsive table-striped" ]
[ "# '''data is a list of pairs (lemma, form) # ''' # terms", "== '__main__': # if len(sys.argv) != 2: # print('Usage: python generate_cannonical.py <csv file>')", "== 'A' and msd[3] == 'm': form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0]", "= lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not found' else: for word in", "form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column", "in results: print('{}'.format(canon)) # if __name__ == '__main__': # if len(sys.argv) != 2:", "= head.feats feats_dict = {} feats = feats.strip().split('|') for f in feats: f", "= feats.strip().split('|') for f in feats: f = f.strip().split('=') feats_dict[f[0]] = f[1] gender", "== 'Fem': msd = word.xpos[:-1]+'n' elif gender == 'Neut': msd = word.xpos[:-1]+'n' else:", "msd = word.xpos[:-1]+'y' elif gender == 'Fem': msd = word.xpos[:-1]+'n' elif gender ==", "elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form def process_nlp_pipeline(lang,", "doc = process_nlp_pipeline('sl', text) # # result = [] # for term, sent,", "sent, lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return result def", "term.words: if word.upos == 'NOUN' or word.upos == 'PROPN': head = word break", "determine delimiter, assuming Excel CSV dialect.') dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile,", "word.xpos[:-1]+'y' elif gender == 'Fem': msd = word.xpos[:-1]+'n' elif gender == 'Neut': msd", "= Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not", "'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] ==", "data if __name__ == '__main__': parser = 
argparse.ArgumentParser(description='Converter to canonical form in Slovene", "[t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head", "print('{}'.format(canon)) # if __name__ == '__main__': # if len(sys.argv) != 2: # print('Usage:", "feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos) == 6:", "= read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data) # for canon in results:", "= [] for word in term.words: if word.upos == 'NOUN' or word.upos ==", "lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return result def process(forms):", "[] # for term, sent, lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent)))", "feats = head.feats feats_dict = {} feats = feats.strip().split('|') for f in feats:", "1: head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return", "word.xpos[:-1]+'n' elif gender == 'Neut': msd = word.xpos[:-1]+'n' else: msd = None return", "word.id > head.id: post.append(word) canon = [] for el in pre: msd =", "if len(term.words) == 1: head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form", "(lemma, form) # ''' # terms = [x[1] for x in data] #", "gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender", "if msd[0] == 'A' and msd[3] == 'm': form = lem_adj('m', el.text.lower()) canon.append(form)", "pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head = None pre = [] post", "== '__main__': parser = argparse.ArgumentParser(description='Converter to canonical form in Slovene 
language') parser.add_argument('csv_file', type=argparse.FileType('r'),", "= read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon in results: print('{}'.format(canon)) # if", "return doc def get_adj_msd(head, word): feats = head.feats feats_dict = {} feats =", "read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data) # for canon in results: #", "canonical form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV", "feats = feats.strip().split('|') for f in feats: f = f.strip().split('=') feats_dict[f[0]] = f[1]", "[find_canon(sent) for sent in doc.sentences] def read_csv(fname, columnID=0): data = [] with open(fname)", "= csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter, assuming Excel CSV dialect.') dialect", "None pre = [] post = [] for word in term.words: if word.upos", "list of pairs (lemma, form) # ''' # terms = [x[1] for x", "is None: canon.append(el.lemma.lower()) else: if msd[0] == 'A' and msd[3] == 'm': form", "'Masc' and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif gender == 'Masc' and", "lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form", "# def process(data): # '''data is a list of pairs (lemma, form) #", "parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)') args", "post = [] for word in term.words: if word.upos == 'NOUN' or word.upos", "lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: canon.append(el.text) return '", "f = f.strip().split('=') feats_dict[f[0]] = f[1] 
gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1]", "lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR,", "and msd[3] == 'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A'", "generate_cannonical.py <csv file>') # else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results", "Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: canon.append(el.text) return", "doc.sentences] def read_csv(fname, columnID=0): data = [] with open(fname) as csvfile: try: dialect", "argparse from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd):", "= feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos) ==", "# if len(sys.argv) != 2: # print('Usage: python generate_cannonical.py <csv file>') # else:", "data = [] with open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error:", "read_csv(fname, columnID=0): data = [] with open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048))", "csv.Error: print('Warning: cannot determine delimiter, assuming Excel CSV dialect.') dialect = 'excel' csvfile.seek(0)", "'__main__': parser = argparse.ArgumentParser(description='Converter to canonical form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input", "found' else: for word in term.words: if word.id < head.id: pre.append(word) elif word.id", "= '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) # # result = [] #", "lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: 
canon.append(el.text) return ' '.join(canon) # def process(data):", "feats.strip().split('|') for f in feats: f = f.strip().split('=') feats_dict[f[0]] = f[1] gender =", "matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head = None pre = [] post =", "# ''' # terms = [x[1] for x in data] # lemmas =", "def read_csv(fname, columnID=0): data = [] with open(fname) as csvfile: try: dialect =", "f in feats: f = f.strip().split('=') feats_dict[f[0]] = f[1] gender = feats_dict['Gender'] #print(gender)", "'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp =", "x in data] # lemmas = [x[0] for x in data] # text", "= f[1] gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender == 'Masc'", "wrd): lem = Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender ==", "= Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR,", "len(sys.argv) != 2: # print('Usage: python generate_cannonical.py <csv file>') # else: # data", "data] # text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) # # result", "msd[0] == 'A' and msd[3] == 'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif", "form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text)", "to canonical form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int,", "return ' '.join(canon) # def process(data): # '''data is a list of pairs", "dialect.') dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i, row in", "= [] # for term, 
sent, lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma,", "= argparse.ArgumentParser(description='Converter to canonical form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file')", "argparse.ArgumentParser(description='Converter to canonical form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id',", "process(data): # '''data is a list of pairs (lemma, form) # ''' #", "def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return", "print('Error, line {}'.format(i)) return data if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converter to", "in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head = None pre", "and msd[3] == 'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR,", "tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc def get_adj_msd(head, word): feats = head.feats", "elif msd[0] == 'A' and msd[3] == 'f': form = lem_adj('f', el.text.lower()) canon.append(form)", "el in pre: msd = get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower()) else:", "[] for word in term.words: if word.upos == 'NOUN' or word.upos == 'PROPN':", "== 7: msd = word.xpos[:-1]+'y' elif gender == 'Fem': msd = word.xpos[:-1]+'n' elif", "el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'f': form = lem_adj('f',", "logging_level='WARNING') doc = nlp(text) return doc def get_adj_msd(head, word): feats = head.feats feats_dict", "result.append((lemma, find_canon(sent))) # return result def process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl',", "python generate_cannonical.py <csv file>') 
# else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t') #", "get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower()) else: if msd[0] == 'A' and", "data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data) # for canon in", "__name__ == '__main__': # if len(sys.argv) != 2: # print('Usage: python generate_cannonical.py <csv", "'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form =", "word in term.words: if word.id < head.id: pre.append(word) elif word.id > head.id: post.append(word)", "language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)')", "mylist[i].text.lower() == pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return", "canon.append(form) elif msd[0] == 'A' and msd[3] == 'f': form = lem_adj('f', el.text.lower())", "'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form def", "word break if head is None: if len(term.words) == 1: head2 = term.words[0]", "elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form", "= Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: canon.append(el.text)", "reader = csv.reader(csvfile, dialect) for i, row in enumerate(reader): try: data.append(row[columnID]) except: print('Error,", "else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data) # for", "# result = [] # for term, sent, lemma in zip(terms, 
doc.sentences, lemmas):", "CSV dialect.') dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i, row", "as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter, assuming", "t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head = None", "form = lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower())", "= lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form)", "== 'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form", "term.words: if word.id < head.id: pre.append(word) elif word.id > head.id: post.append(word) canon =", "# lemmas = [x[0] for x in data] # text = '\\n'.join(terms) #", "word.xpos[:-1]+'n' else: msd = None return msd def subfinder(mylist, pattern): matches = []", "in data] # text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) # #", "data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data if __name__ == '__main__': parser =", "if msd is None: canon.append(el.lemma.lower()) else: if msd[0] == 'A' and msd[3] ==", "lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'f': form =", "'m': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n':", "= classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc def get_adj_msd(head, word):", 
"msd[0] == 'A' and msd[3] == 'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem", "2: # print('Usage: python generate_cannonical.py <csv file>') # else: # data = read_csv(sys.argv[1],", "# return result def process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return", "= 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i, row in enumerate(reader): try:", "gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text):", "f.strip().split('=') feats_dict[f[0]] = f[1] gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender", "<csv file>') # else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results =", "help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)') args = parser.parse_args()", "head_form else: return 'HEAD not found' else: for word in term.words: if word.id", "elif gender == 'Fem': msd = word.xpos[:-1]+'n' elif gender == 'Neut': msd =", "el) if msd is None: canon.append(el.lemma.lower()) else: if msd[0] == 'A' and msd[3]", "and [t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term):", "import csv import argparse from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__)", "def find_canon(term): head = None pre = [] post = [] for word", "< head.id: pre.append(word) elif word.id > head.id: post.append(word) canon = [] for el", "# # result = [] # for term, sent, lemma in zip(terms, doc.sentences,", "line {}'.format(i)) return data if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converter to canonical", "for f in feats: f = f.strip().split('=') feats_dict[f[0]] = f[1] gender = feats_dict['Gender']", "for x in data] # lemmas = [x[0] for x 
in data] #", "args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon in", "form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'n':", "== 'PROPN': head = word break if head is None: if len(term.words) ==", "lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'n': form =", "7: msd = word.xpos[:-1]+'y' elif gender == 'Fem': msd = word.xpos[:-1]+'n' elif gender", "process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in", "msd = word.xpos[:-1]+'n' else: msd = None return msd def subfinder(mylist, pattern): matches", "msd[3] == 'm': form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A' and", "dialect) for i, row in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return", "== pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head = None pre = []", "parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon in results: print('{}'.format(canon))", "return data if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converter to canonical form in", "process_nlp_pipeline('sl', text) # # result = [] # for term, sent, lemma in", "'Fem': msd = word.xpos[:-1]+'n' elif gender == 'Neut': msd = word.xpos[:-1]+'n' else: msd", "process(data) for canon in results: print('{}'.format(canon)) # if __name__ == '__main__': # if", "except: print('Error, line {}'.format(i)) return data if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converter", "[] with open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot", "csv file') parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)') args = parser.parse_args() data", 
"in feats: f = f.strip().split('=') feats_dict[f[0]] = f[1] gender = feats_dict['Gender'] #print(gender) #gender", "'Masc' and len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif gender == 'Fem': msd", "for i, row in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data", "Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not found'", "== 'm': form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3]", "doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in doc.sentences] def read_csv(fname, columnID=0):", "head.feats feats_dict = {} feats = feats.strip().split('|') for f in feats: f =", "None: if len(term.words) == 1: head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin'))", "> head.id: post.append(word) canon = [] for el in pre: msd = get_adj_msd(head,", "classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer() if gender", "= word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos) == 7: msd = word.xpos[:-1]+'y'", "msd[3] == 'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin'))", "x in data] # text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) #", "= word break if head is None: if len(term.words) == 1: head2 =", "gender == 'Neut': msd = word.xpos[:-1]+'n' else: msd = None return msd def", "= None return msd def subfinder(mylist, pattern): matches = [] for i in", "pairs (lemma, form) # ''' # terms = [x[1] for x in data]", "lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) 
for", "classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc def get_adj_msd(head, word): feats", "find_canon(term): head = None pre = [] post = [] for word in", "in data] # lemmas = [x[0] for x in data] # text =", "classla import csv import argparse from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR =", "msd = get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower()) else: if msd[0] ==", "from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem", "msd is None: canon.append(el.lemma.lower()) else: if msd[0] == 'A' and msd[3] == 'm':", "result = [] # for term, sent, lemma in zip(terms, doc.sentences, lemmas): #", "in pre: msd = get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower()) else: if", "if len(sys.argv) != 2: # print('Usage: python generate_cannonical.py <csv file>') # else: #", "'\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in doc.sentences] def read_csv(fname,", "columnID=0, sep='\\t') # results = process(data) # for canon in results: # print('{}'.format(canon))", "'__main__': # if len(sys.argv) != 2: # print('Usage: python generate_cannonical.py <csv file>') #", "help='CSV column number (zero indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results", "el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'n': form = lem_adj('n',", "word.upos == 'NOUN' or word.upos == 'PROPN': head = word break if head", "# result.append((lemma, find_canon(sent))) # return result def process(forms): text = '\\n'.join(forms) doc =", "mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]]", "msd def subfinder(mylist, pattern): matches = [] for i in 
range(len(mylist)): #print(mylist[i].text) #if", "[x[0] for x in data] # text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl',", "for sent in doc.sentences] def read_csv(fname, columnID=0): data = [] with open(fname) as", "= [] for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text", "= [x[1] for x in data] # lemmas = [x[0] for x in", "pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern:", "i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern: if", "'Neut': msd = word.xpos[:-1]+'n' else: msd = None return msd def subfinder(mylist, pattern):", "canon in results: print('{}'.format(canon)) # if __name__ == '__main__': # if len(sys.argv) !=", "doc = nlp(text) return doc def get_adj_msd(head, word): feats = head.feats feats_dict =", "== 6: msd = word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos) == 7:", "column number (zero indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results =", "pre = [] post = [] for word in term.words: if word.upos ==", "mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head = None pre =", "result def process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for", "Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer() if", "el in post: canon.append(el.text) return ' '.join(canon) # def process(data): # '''data is", "word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif", "else: for word in term.words: if word.id < head.id: pre.append(word) elif word.id >", "head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 
'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form", "not found' else: for word in term.words: if word.id < head.id: pre.append(word) elif", "'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not found' else: for", "elif gender == 'Masc' and len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif gender", "'HEAD not found' else: for word in term.words: if word.id < head.id: pre.append(word)", "lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD", "try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter, assuming Excel CSV", "process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc", "= process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in doc.sentences] def read_csv(fname, columnID=0): data", "lemmas): # result.append((lemma, find_canon(sent))) # return result def process(forms): text = '\\n'.join(forms) doc", "results: print('{}'.format(canon)) # if __name__ == '__main__': # if len(sys.argv) != 2: #", "== 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd)", "read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon in results: print('{}'.format(canon)) # if __name__", "processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc def get_adj_msd(head, word): feats =", "is a list of pairs (lemma, form) # ''' # terms = [x[1]", "lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = 
lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang,", "= nlp(text) return doc def get_adj_msd(head, word): feats = head.feats feats_dict = {}", "<filename>services/web/canonizer.py import os import classla import csv import argparse from lemmagen3 import Lemmatizer", "feats: f = f.strip().split('=') feats_dict[f[0]] = f[1] gender = feats_dict['Gender'] #print(gender) #gender =", "gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif", "'A' and msd[3] == 'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem = Lemmatizer()", "subfinder(mylist, pattern): matches = [] for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text ==", "[] for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text ==", "text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) # # result = []", "''' # terms = [x[1] for x in data] # lemmas = [x[0]", "import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer()", "return 'HEAD not found' else: for word in term.words: if word.id < head.id:", "canon = [] for el in pre: msd = get_adj_msd(head, el) if msd", "in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column number", "!= 2: # print('Usage: python generate_cannonical.py <csv file>') # else: # data =", "[] post = [] for word in term.words: if word.upos == 'NOUN' or", "canon.append(el.text) return ' '.join(canon) # def process(data): # '''data is a list of", "type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)') args =", "(zero indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results 
= process(data) for", "of pairs (lemma, form) # ''' # terms = [x[1] for x in", "file') parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)') args = parser.parse_args() data =", "Excel CSV dialect.') dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i,", "pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower() for t", "in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower()", "Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv file') parser.add_argument('column_id', type=int, help='CSV column number (zero", "# doc = process_nlp_pipeline('sl', text) # # result = [] # for term,", "[] for el in pre: msd = get_adj_msd(head, el) if msd is None:", "el.text.lower()) canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el", "= [x[0] for x in data] # text = '\\n'.join(terms) # doc =", "None: canon.append(el.lemma.lower()) else: if msd[0] == 'A' and msd[3] == 'm': form =", "return head_form else: return 'HEAD not found' else: for word in term.words: if", "pre: msd = get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower()) else: if msd[0]", "== 'A' and msd[3] == 'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0]", "enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data if __name__ == '__main__':", "head.id: post.append(word) canon = [] for el in pre: msd = get_adj_msd(head, el)", "canon.append(form) elif msd[0] == 'A' and msd[3] == 'n': form = lem_adj('n', el.text.lower())", "or word.upos == 'PROPN': head = word break if head is None: if", "for word in term.words: if word.upos == 'NOUN' or word.upos == 'PROPN': head", 
"parser = argparse.ArgumentParser(description='Converter to canonical form in Slovene language') parser.add_argument('csv_file', type=argparse.FileType('r'), help='Input csv", "= word.xpos[:-1]+'y' elif gender == 'Fem': msd = word.xpos[:-1]+'n' elif gender == 'Neut':", "form) # ''' # terms = [x[1] for x in data] # lemmas", "= process(data) for canon in results: print('{}'.format(canon)) # if __name__ == '__main__': #", "pattern): matches = [] for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0]", "columnID=0): data = [] with open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except", "= None pre = [] post = [] for word in term.words: if", "# print('Usage: python generate_cannonical.py <csv file>') # else: # data = read_csv(sys.argv[1], columnID=0,", "f[1] gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender == 'Masc' and", "form = lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True,", "doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return result def process(forms): text = '\\n'.join(forms)", "sent in doc.sentences] def read_csv(fname, columnID=0): data = [] with open(fname) as csvfile:", "canon.append(el.lemma.lower()) else: if msd[0] == 'A' and msd[3] == 'm': form = lem_adj('m',", "csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter, assuming Excel CSV dialect.') dialect =", "text) # # result = [] # for term, sent, lemma in zip(terms,", "columnID=args.column_id) results = process(data) for canon in results: print('{}'.format(canon)) # if __name__ ==", "{} feats = feats.strip().split('|') for f in feats: f = f.strip().split('=') feats_dict[f[0]] =", "lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not found' else: for word in term.words:", "def get_adj_msd(head, 
word): feats = head.feats feats_dict = {} feats = feats.strip().split('|') for", "= [] with open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning:", "number (zero indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results = process(data)", "= word.xpos[:-1]+'n' else: msd = None return msd def subfinder(mylist, pattern): matches =", "Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin'))", "== pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]] ==", "return matches def find_canon(term): head = None pre = [] post = []", "msd = None return msd def subfinder(mylist, pattern): matches = [] for i", "[x[1] for x in data] # lemmas = [x[0] for x in data]", "if head is None: if len(term.words) == 1: head2 = term.words[0] lem =", "indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon", "for word in term.words: if word.id < head.id: pre.append(word) elif word.id > head.id:", "= lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'f': form", "parser.add_argument('column_id', type=int, help='CSV column number (zero indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name,", "results = process(data) for canon in results: print('{}'.format(canon)) # if __name__ == '__main__':", "BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer() if gender == 'm':", "lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem =", "for canon in results: print('{}'.format(canon)) # if __name__ == '__main__': # if len(sys.argv)", "else: if msd[0] == 'A' and msd[3] == 
'm': form = lem_adj('m', el.text.lower())", "terms = [x[1] for x in data] # lemmas = [x[0] for x", "canon.append(head_form) for el in post: canon.append(el.text) return ' '.join(canon) # def process(data): #", "def process(data): # '''data is a list of pairs (lemma, form) # '''", "= word.xpos[:-1]+'n' elif gender == 'Neut': msd = word.xpos[:-1]+'n' else: msd = None", "pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def", "mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower()", "find_canon(sent))) # return result def process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text)", "def process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for sent", "data = read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon in results: print('{}'.format(canon)) #", "return msd def subfinder(mylist, pattern): matches = [] for i in range(len(mylist)): #print(mylist[i].text)", "term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else: return", "== 'Masc' and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif gender == 'Masc'", "and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos)", "import classla import csv import argparse from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR", "== pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches", "get_adj_msd(head, word): feats = head.feats feats_dict = {} feats = feats.strip().split('|') for f", "row in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data if 
__name__", "= '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in doc.sentences] def", "try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data if __name__ == '__main__': parser", "'A' and msd[3] == 'm': form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] ==", "print('Warning: cannot determine delimiter, assuming Excel CSV dialect.') dialect = 'excel' csvfile.seek(0) reader", "print('Usage: python generate_cannonical.py <csv file>') # else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t')", "if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif", "gender == 'Masc' and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif gender ==", "== 'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3]", "# terms = [x[1] for x in data] # lemmas = [x[0] for", "'NOUN' or word.upos == 'PROPN': head = word break if head is None:", "open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter,", "# for term, sent, lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) #", "and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower() for t in", "dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i, row in enumerate(reader):", "in doc.sentences] def read_csv(fname, columnID=0): data = [] with open(fname) as csvfile: try:", "6: msd = word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos) == 7: msd", "except csv.Error: print('Warning: cannot determine delimiter, assuming Excel CSV dialect.') dialect = 'excel'", "= csv.reader(csvfile, dialect) for i, row in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line", 
"== 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender ==", "len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos) ==", "in post: canon.append(el.text) return ' '.join(canon) # def process(data): # '''data is a", "= gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny'", "= [] post = [] for word in term.words: if word.upos == 'NOUN'", "nlp(text) return doc def get_adj_msd(head, word): feats = head.feats feats_dict = {} feats", "'A' and msd[3] == 'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] ==", "if __name__ == '__main__': # if len(sys.argv) != 2: # print('Usage: python generate_cannonical.py", "lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post:", "head = None pre = [] post = [] for word in term.words:", "doc def get_adj_msd(head, word): feats = head.feats feats_dict = {} feats = feats.strip().split('|')", "and len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif gender == 'Fem': msd =", "head is None: if len(term.words) == 1: head2 = term.words[0] lem = Lemmatizer()", "msd[3] == 'f': form = lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A' and", "csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter, assuming Excel", "= term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else:", "head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: canon.append(el.text) return ' '.join(canon) #", "in term.words: if word.id < head.id: pre.append(word) elif word.id > head.id: post.append(word) 
canon", "= [] for el in pre: msd = get_adj_msd(head, el) if msd is", "= get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower()) else: if msd[0] == 'A'", "head_form = lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not found' else: for word", "'''data is a list of pairs (lemma, form) # ''' # terms =", "= {} feats = feats.strip().split('|') for f in feats: f = f.strip().split('=') feats_dict[f[0]]", "range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() ==", "gender == 'Masc' and len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif gender ==", "lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower()) return head_form else: return 'HEAD not found' else:", "lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc", "import argparse from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender,", "'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin'))", "None return msd def subfinder(mylist, pattern): matches = [] for i in range(len(mylist)):", "= lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING')", "form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'f':", "for x in data] # text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text)", "head.id: pre.append(word) elif word.id > head.id: post.append(word) canon = [] for el in", 
"text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in doc.sentences]", "process_nlp_pipeline('sl', text) return [find_canon(sent) for sent in doc.sentences] def read_csv(fname, columnID=0): data =", "break if head is None: if len(term.words) == 1: head2 = term.words[0] lem", "== 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp", "= f.strip().split('=') feats_dict[f[0]] = f[1] gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if", "else: msd = None return msd def subfinder(mylist, pattern): matches = [] for", "for el in pre: msd = get_adj_msd(head, el) if msd is None: canon.append(el.lemma.lower())", "'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: canon.append(el.text) return ' '.join(canon)", "== pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0] and [t.text.lower() for", "lem_adj(gender, wrd): lem = Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender", "' '.join(canon) # def process(data): # '''data is a list of pairs (lemma,", "# text = '\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) # # result =", "'\\n'.join(terms) # doc = process_nlp_pipeline('sl', text) # # result = [] # for", "if gender == 'Masc' and len(word.xpos) == 6: msd = word.xpos[:-1]+'ny' elif gender", "post: canon.append(el.text) return ' '.join(canon) # def process(data): # '''data is a list", "= parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id) results = process(data) for canon in results:", "i, row in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data if", "csv import argparse from lemmagen3 import Lemmatizer classla.download('sl', 
logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def", "else: return 'HEAD not found' else: for word in term.words: if word.id <", "matches = [] for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and", "elif msd[0] == 'A' and msd[3] == 'n': form = lem_adj('n', el.text.lower()) canon.append(form)", "text) return [find_canon(sent) for sent in doc.sentences] def read_csv(fname, columnID=0): data = []", "return form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc =", "= lem.lemmatize(head.text.lower()) canon.append(head_form) for el in post: canon.append(el.text) return ' '.join(canon) # def", "msd[0] == 'A' and msd[3] == 'm': form = lem_adj('m', el.text.lower()) canon.append(form) elif", "for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)]) return matches def find_canon(term): head =", "# else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data) #", "if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converter to canonical form in Slovene language')", "return [find_canon(sent) for sent in doc.sentences] def read_csv(fname, columnID=0): data = [] with", "pre.append(word) elif word.id > head.id: post.append(word) canon = [] for el in pre:", "head = word break if head is None: if len(term.words) == 1: head2", "zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return result def process(forms): text =", "dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine delimiter, assuming Excel CSV dialect.')", "nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc def get_adj_msd(head,", "= lem_adj('f', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] == 'n': 
form", "if mylist[i].text.lower() == pattern[0] and [t.text.lower() for t in mylist[i:i+len(pattern)]] == pattern: matches.append(mylist[i:i+len(pattern)])", "for term, sent, lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return", "len(term.words) == 1: head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form =", "delimiter, assuming Excel CSV dialect.') dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect)", "cannot determine delimiter, assuming Excel CSV dialect.') dialect = 'excel' csvfile.seek(0) reader =", "a list of pairs (lemma, form) # ''' # terms = [x[1] for", "#if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0] and", "csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i, row in enumerate(reader): try: data.append(row[columnID]) except:", "{}'.format(i)) return data if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converter to canonical form", "'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return form def process_nlp_pipeline(lang, text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma',", "== 'Neut': msd = word.xpos[:-1]+'n' else: msd = None return msd def subfinder(mylist,", "if word.id < head.id: pre.append(word) elif word.id > head.id: post.append(word) canon = []", "= process_nlp_pipeline('sl', text) # # result = [] # for term, sent, lemma", "#gender = gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos) == 6: msd =", "def subfinder(mylist, pattern): matches = [] for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text", "#print(gender) #gender = gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos) == 6: msd", "data] # lemmas = [x[0] for x in data] # text = '\\n'.join(terms)", "def lem_adj(gender, wrd): lem = Lemmatizer() if gender == 'm': 
lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif", "gender == 'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form =", "lem = Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin')) elif gender == 'f':", "canon.append(form) lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head.text.lower()) canon.append(head_form) for el in", "len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif gender == 'Fem': msd = word.xpos[:-1]+'n'", "if word.upos == 'NOUN' or word.upos == 'PROPN': head = word break if", "word.upos == 'PROPN': head = word break if head is None: if len(term.words)", "in term.words: if word.upos == 'NOUN' or word.upos == 'PROPN': head = word", "and msd[3] == 'm': form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A'", "word in term.words: if word.upos == 'NOUN' or word.upos == 'PROPN': head =", "'m': form = lem_adj('m', el.text.lower()) canon.append(form) elif msd[0] == 'A' and msd[3] ==", "elif word.id > head.id: post.append(word) canon = [] for el in pre: msd", "in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i)) return data if __name__ ==", "# if __name__ == '__main__': # if len(sys.argv) != 2: # print('Usage: python", "post.append(word) canon = [] for el in pre: msd = get_adj_msd(head, el) if", "feats_dict = {} feats = feats.strip().split('|') for f in feats: f = f.strip().split('=')", "with open(fname) as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(2048)) except csv.Error: print('Warning: cannot determine", "'.join(canon) # def process(data): # '''data is a list of pairs (lemma, form)", "'PROPN': head = word break if head is None: if len(term.words) == 1:", "== 'NOUN' or word.upos == 'PROPN': head = word 
break if head is", "for i in range(len(mylist)): #print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern:", "feats_dict[f[0]] = f[1] gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender ==", "csv.reader(csvfile, dialect) for i, row in enumerate(reader): try: data.append(row[columnID]) except: print('Error, line {}'.format(i))", "word): feats = head.feats feats_dict = {} feats = feats.strip().split('|') for f in", "'f': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-female.bin')) elif gender == 'n': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-neutral.bin')) form = lem.lemmatize(wrd) return", "file>') # else: # data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data)", "os import classla import csv import argparse from lemmagen3 import Lemmatizer classla.download('sl', logging_level='WARNING')", "type=int, help='CSV column number (zero indexed)') args = parser.parse_args() data = read_csv(args.csv_file.name, columnID=args.column_id)", "== 'Masc' and len(word.xpos) == 7: msd = word.xpos[:-1]+'y' elif gender == 'Fem':", "msd = word.xpos[:-1]+'n' elif gender == 'Neut': msd = word.xpos[:-1]+'n' else: msd =", "#print(mylist[i].text) #if mylist[i].text == pattern[0] and mylist[i:i+len(pattern)].text == pattern: if mylist[i].text.lower() == pattern[0]", "term, sent, lemma in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return result", "text): nlp = classla.Pipeline(lang=lang, processors='tokenize,pos,lemma', tokenize_pretokenized=True, logging_level='WARNING') doc = nlp(text) return doc def", "= os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR,", "# data = read_csv(sys.argv[1], columnID=0, sep='\\t') # results = process(data) # for canon", "gender == 'Fem': msd = word.xpos[:-1]+'n' elif gender == 'Neut': msd = 
word.xpos[:-1]+'n'", "msd = word.xpos[:-1]+'ny' elif gender == 'Masc' and len(word.xpos) == 7: msd =", "for el in post: canon.append(el.text) return ' '.join(canon) # def process(data): # '''data", "word.id < head.id: pre.append(word) elif word.id > head.id: post.append(word) canon = [] for", "return result def process(forms): text = '\\n'.join(forms) doc = process_nlp_pipeline('sl', text) return [find_canon(sent)", "logging_level='WARNING') BASEDIR = os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer() if gender ==", "== 1: head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon.bin')) head_form = lem.lemmatize(head2.text.lower())", "in zip(terms, doc.sentences, lemmas): # result.append((lemma, find_canon(sent))) # return result def process(forms): text", "import os import classla import csv import argparse from lemmagen3 import Lemmatizer classla.download('sl',", "assuming Excel CSV dialect.') dialect = 'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for", "lemmas = [x[0] for x in data] # text = '\\n'.join(terms) # doc", "'excel' csvfile.seek(0) reader = csv.reader(csvfile, dialect) for i, row in enumerate(reader): try: data.append(row[columnID])", "== 'A' and msd[3] == 'n': form = lem_adj('n', el.text.lower()) canon.append(form) lem =", "os.path.dirname(__file__) def lem_adj(gender, wrd): lem = Lemmatizer() if gender == 'm': lem.load_model(os.path.join(BASEDIR, 'lemmagen_models/kanon-adj-male.bin'))", "__name__ == '__main__': parser = argparse.ArgumentParser(description='Converter to canonical form in Slovene language') parser.add_argument('csv_file',", "matches def find_canon(term): head = None pre = [] post = [] for", "gender = feats_dict['Gender'] #print(gender) #gender = gender.strip().split('=')[1] if gender == 'Masc' and len(word.xpos)", "is None: if len(term.words) == 1: head2 = term.words[0] lem = Lemmatizer() lem.load_model(os.path.join(BASEDIR,", "elif gender == 'Neut': msd = 
word.xpos[:-1]+'n' else: msd = None return msd" ]
[ "props: print(f\"No properties for shift abbrevation: {abbr}\") continue except KeyError: print(f\"Shift abbrevation not", "= Event() e.name = shift.properties.name e.begin = shift.beginning e.duration = shift.properties.duration c.events.add(e) return", "from_dict( cls, input: dict[str, str], mapper: Optional[dict] = None ) -> \"Roster\": shifts", "date=date) shifts.append(shift) return cls(shifts=shifts) def to_ics(self): c = Calendar() for shift in self.shifts:", "Roster: shifts: list[Shift] name: str = \"<NAME>\" _year: int = 2022 _month: int", "to edit: 00_roster.ipynb (unless otherwise specified). __all__ = ['ShiftProperties', 'Shift', 'Roster'] # Cell", "_datep = re.compile(r\"\\d{2}\") @classmethod def from_dict( cls, input: dict[str, str], mapper: Optional[dict] =", "'Shift', 'Roster'] # Cell from dataclasses import dataclass from datetime import datetime, timedelta,", "int = 2022 _month: int = 3 # TODO: Read from Excel _dayp", "from datetime import datetime, timedelta, date, time from ics import Calendar, Event import", "e = Event() e.name = shift.properties.name e.begin = shift.beginning e.duration = shift.properties.duration c.events.add(e)", "DO NOT EDIT! File to edit: 00_roster.ipynb (unless otherwise specified). __all__ = ['ShiftProperties',", "ShiftProperties: name: str starting_hour: timedelta duration: timedelta @dataclass class Shift: properties: ShiftProperties date:", ") -> \"Roster\": shifts = [] # TODO: This whole continue stuff is", "print(f\"Shift abbrevation not found in mapper: {abbr}\") continue date = datetime( year=cls._year, month=cls._month,", "class Roster: shifts: list[Shift] name: str = \"<NAME>\" _year: int = 2022 _month:", "me! 
for date_str, abbr in input.items(): if abbr == \"(/)\": continue try: props", "= ['ShiftProperties', 'Shift', 'Roster'] # Cell from dataclasses import dataclass from datetime import", "import dataclass from datetime import datetime, timedelta, date, time from ics import Calendar,", "= self.date + self.properties.starting_hour # Cell @dataclass class Roster: shifts: list[Shift] name: str", "shift in self.shifts: e = Event() e.name = shift.properties.name e.begin = shift.beginning e.duration", "Calendar() for shift in self.shifts: e = Event() e.name = shift.properties.name e.begin =", "[] # TODO: This whole continue stuff is just horrible. Change it future", "it future me! for date_str, abbr in input.items(): if abbr == \"(/)\": continue", "NOT EDIT! File to edit: 00_roster.ipynb (unless otherwise specified). __all__ = ['ShiftProperties', 'Shift',", "= datetime( year=cls._year, month=cls._month, day=int(cls._datep.search(date_str).group()), tzinfo=ZoneInfo(\"Europe/Berlin\"), ) shift = Shift(props, date=date) shifts.append(shift) return", "dict[str, str], mapper: Optional[dict] = None ) -> \"Roster\": shifts = [] #", "\"Roster\": shifts = [] # TODO: This whole continue stuff is just horrible.", "def from_dict( cls, input: dict[str, str], mapper: Optional[dict] = None ) -> \"Roster\":", "{abbr}\") continue except KeyError: print(f\"Shift abbrevation not found in mapper: {abbr}\") continue date", "def to_ics(self): c = Calendar() for shift in self.shifts: e = Event() e.name", "datetime import datetime, timedelta, date, time from ics import Calendar, Event import re", "properties for shift abbrevation: {abbr}\") continue except KeyError: print(f\"Shift abbrevation not found in", "(unless otherwise specified). 
__all__ = ['ShiftProperties', 'Shift', 'Roster'] # Cell from dataclasses import", "name: str = \"<NAME>\" _year: int = 2022 _month: int = 3 #", "\"(/)\": continue try: props = mapper[abbr] if not props: print(f\"No properties for shift", "try: props = mapper[abbr] if not props: print(f\"No properties for shift abbrevation: {abbr}\")", "= Shift(props, date=date) shifts.append(shift) return cls(shifts=shifts) def to_ics(self): c = Calendar() for shift", "__post_init__(self): self.beginning: datetime = self.date + self.properties.starting_hour # Cell @dataclass class Roster: shifts:", "# TODO: This whole continue stuff is just horrible. Change it future me!", "Change it future me! for date_str, abbr in input.items(): if abbr == \"(/)\":", "abbr in input.items(): if abbr == \"(/)\": continue try: props = mapper[abbr] if", ") shift = Shift(props, date=date) shifts.append(shift) return cls(shifts=shifts) def to_ics(self): c = Calendar()", "except KeyError: print(f\"Shift abbrevation not found in mapper: {abbr}\") continue date = datetime(", "duration: timedelta @dataclass class Shift: properties: ShiftProperties date: datetime def __post_init__(self): self.beginning: datetime", "File to edit: 00_roster.ipynb (unless otherwise specified). 
@dataclass
class Shift:
    """One concrete shift: a shift type bound to a specific calendar day."""

    properties: ShiftProperties  # static shift description (name, start offset, duration)
    date: datetime  # midnight of the day the shift is worked, tz-aware

    def __post_init__(self) -> None:
        # Absolute start instant = midnight of the shift day plus the
        # shift type's daily starting offset.
        offset = self.properties.starting_hour
        self.beginning: datetime = self.date + offset
# Cell
@dataclass
class Roster:
    """A monthly duty roster: a list of `Shift`s exportable as iCalendar.

    Class-level regexes parse the German day abbreviation and the
    day-of-month number out of the date keys of the input mapping.
    """

    shifts: list[Shift]
    name: str = "<NAME>"
    _year: int = 2022
    _month: int = 3  # TODO: Read from Excel
    _dayp = re.compile(r"MO|DI|MI|DO|FR|SA|SO")  # German weekday abbreviations
    _datep = re.compile(r"\d{2}")  # two-digit day of month inside the key

    @classmethod
    def from_dict(
        cls, input: dict[str, str], mapper: Optional[dict] = None
    ) -> "Roster":
        """Build a Roster from a ``{date_str: shift_abbreviation}`` mapping.

        ``mapper`` translates a shift abbreviation to its `ShiftProperties`.
        Entries whose abbreviation is the placeholder ``"(/)"``, is missing
        from ``mapper``, or maps to a falsy value are skipped with a
        diagnostic print.  (``input`` shadows the builtin, but the name is
        part of the public signature and is kept for compatibility.)
        """
        shifts = []
        # FIX: with the default mapper=None the original crashed with a
        # TypeError on ``mapper[abbr]`` (None is not subscriptable, and
        # TypeError is not caught by the KeyError handler below).  An empty
        # mapper reproduces the intended "not found" path instead.
        if mapper is None:
            mapper = {}
        for date_str, abbr in input.items():
            if abbr == "(/)":
                continue
            try:
                props = mapper[abbr]
                if not props:
                    print(f"No properties for shift abbrevation: {abbr}")
                    continue
            except KeyError:
                print(f"Shift abbrevation not found in mapper: {abbr}")
                continue
            # FIX: a key without a two-digit day number used to raise
            # AttributeError on ``.group()``; skip such keys gracefully.
            day_match = cls._datep.search(date_str)
            if day_match is None:
                print(f"No day number found in date string: {date_str}")
                continue
            # Local renamed from ``date`` to avoid shadowing the imported
            # datetime.date.
            shift_date = datetime(
                year=cls._year,
                month=cls._month,
                day=int(day_match.group()),
                tzinfo=ZoneInfo("Europe/Berlin"),
            )
            shifts.append(Shift(props, date=shift_date))
        return cls(shifts=shifts)

    def to_ics(self):
        """Serialize every shift into an ``ics.Calendar`` and return it."""
        c = Calendar()
        for shift in self.shifts:
            e = Event()
            e.name = shift.properties.name
            e.begin = shift.beginning  # absolute start computed in Shift.__post_init__
            e.duration = shift.properties.duration
            c.events.add(e)
        return c
def trans(word):
    # Translate *word* to Odia ('or') via Google Translate; if the service
    # returns an empty string, fall back to the untranslated input.
    translated = goslate.Goslate().translate(word, 'or')
    return translated if translated != "" else word
# Main loop: repeatedly ask for an English Wikipedia article name, scrape the
# rendered page plus its wikitext, machine-translate the body to Odia, and
# re-attach the original <ref> markup in place of the numeric citations.
# NOTE(review): Python 2 script (print statements, urllib2, raw_input);
# parsing is done by raw string surgery on live HTML — fragile by design.
i = 3
while i != 100000:  # effectively an infinite prompt loop
    ArtName = raw_input('\n\ntype the name of article from english wikipedia: ')
    urls = "\n\nhttps://en.wikipedia.org/wiki/" + ArtName
    urls2 = "https://en.wikipedia.org/w/index.php?title=" + ArtName + "&action=edit"
    print urls
    request = urllib2.Request(urls)
    handle = urllib2.urlopen(request)
    content = handle.read()
    # Only articles with an infobox are processed; everything else is skipped.
    if '<table class="infobox' in content:
        print '\n\ndata about ' + ArtName + ' found... working...\n\n'
        # Cut the article body: from the first paragraph up to the
        # "References" section heading of the rendered HTML.
        splitted_page = content.split('<p>', 1);
        splitted_page = splitted_page[1].split('<span class="mw-headline" id="References">References</span>', 1)
        art = splitted_page[0]
        art = art.replace('</p>', '\n')
        art = art.replace('&#160;', '')  # drop non-breaking spaces
        # Turn section headings into wiki "==" markers, then strip all
        # remaining HTML tags.
        art = art.replace('<span class="mw-headline"', '\n==<')
        art = re.sub('\<.*?\>','', art)
        art = re.sub('\ox .*?\em">','', art)
        to_print = trans(art.decode('utf-8')) #translate the text in to Odia
        # to write reference --BEGIN
        # Fetch the raw wikitext from the edit form to recover <ref> markup.
        request2 = urllib2.Request(urls2)
        handle2 = urllib2.urlopen(request2)
        content2 = handle2.read()
        content2 = content2.split('name="wpTextbox1">', 1)
        content2 = content2[1].split('</textarea>', 1)
        content2 = content2[0]
        content2 = content2.replace('&lt;', '<')
        content3 = content2  # keep a copy; the loop below consumes content2
        # NOTE(review): py2 input() evals the typed expression — number of
        # references is supplied manually by the operator.
        ref = input('entre the no. of references: ')
        i = 0
        while i != ref:
            if '<ref' in content2:
                # Extract the next <ref>...</ref> (or self-closing <ref .../>)
                # and substitute it for the "[n]" citation in the translation.
                ref_dataa = content2.split('<ref', 1)
                if '</ref>' in ref_dataa[1]:
                    ref_data = ref_dataa[1].split('</ref>', 1)
                    ref_data = ref_data[0]
                else:
                    ref_data = ''
                ref_data = '<ref' + ref_data + '</ref>'
                if '/>' in ref_data:
                    ref_data = ref_dataa[1].split('/>', 1)
                    ref_data = '<ref' + ref_data[0] + '/>'
                content2 = content2.replace(ref_data, '')
                ref_no = '[' + str(i+1) + ']'
                to_print = to_print.replace(ref_no.decode('utf-8'), ref_data.decode('utf-8'))
            i+=1
        # to write reference -- END
        print "'''" + ArtName + "'''\n"
        # Replace the translated "[edit]" link text with a heading terminator.
        to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\n')
        print to_print
        #to print text after reference as it is -- BEGIN
        # Emit the untranslated wikitext that follows ==References== verbatim.
        as_it_is = content3.split('==References==', 1)
        as_it_is = as_it_is[1]
        as_it_is = as_it_is.replace('</p>', '\n')
        as_it_is = as_it_is.replace('&#160;', '')
        print '\n==References==\n', as_it_is
        #to print text after reference as it is -- END
    else:
        print 'escape', i+1
1) as_it_is = as_it_is[1] as_it_is = as_it_is.replace('</p>', '\\n') as_it_is = as_it_is.replace('&#160;',", "= to_print.replace(ref_no.decode('utf-8'), ref_data.decode('utf-8')) i+=1 # to write reference -- END print \"'''\" +", "#!/usr/bin/env python # -*- coding: utf-8 -*- import urllib2 import re import goslate", "= content2[1].split('</textarea>', 1) content2 = content2[0] content2 = content2.replace('&lt;', '<') content3 = content2", "= 3 while i != 100000: ArtName = raw_input('\\n\\ntype the name of article", "1) art = splitted_page[0] art = art.replace('</p>', '\\n') art = art.replace('&#160;', '') art", "+ str(i+1) + ']' to_print = to_print.replace(ref_no.decode('utf-8'), ref_data.decode('utf-8')) i+=1 # to write reference", "splitted_page[0] art = art.replace('</p>', '\\n') art = art.replace('&#160;', '') art = art.replace('<span class=\"mw-headline\"',", "= content2.replace(ref_data, '') ref_no = '[' + str(i+1) + ']' to_print = to_print.replace(ref_no.decode('utf-8'),", "art.replace('&#160;', '') art = art.replace('<span class=\"mw-headline\"', '\\n==<') art = re.sub('\\<.*?\\>','', art) art =", "ArtName + \"'''\\n\" to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\\n') print to_print #to print text after", "== \"\": ro = word return ro i = 3 while i !=", "ref_data = '<ref' + ref_data[0] + '/>' content2 = content2.replace(ref_data, '') ref_no =", "+ ArtName + \"&action=edit\" print urls request = urllib2.Request(urls) handle = urllib2.urlopen(request) content", "' + ArtName + ' found... 
working...\\n\\n' splitted_page = content.split('<p>', 1); splitted_page =", "= urllib2.Request(urls) handle = urllib2.urlopen(request) content = handle.read() if '<table class=\"infobox' in content:", "'<table class=\"infobox' in content: print '\\n\\ndata about ' + ArtName + ' found...", "'<ref' in content2: ref_dataa = content2.split('<ref', 1) if '</ref>' in ref_dataa[1]: ref_data =", "in to Odia # to write reference --BEGIN request2 = urllib2.Request(urls2) handle2 =", "content2.replace('&lt;', '<') content3 = content2 ref = input('entre the no. of references: ')", "+ \"'''\\n\" to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\\n') print to_print #to print text after reference", "utf-8 -*- import urllib2 import re import goslate def trans(word): gs = goslate.Goslate()", "\"https://en.wikipedia.org/w/index.php?title=\" + ArtName + \"&action=edit\" print urls request = urllib2.Request(urls) handle = urllib2.urlopen(request)", "ref_data + '</ref>' if '/>' in ref_data: ref_data = ref_dataa[1].split('/>', 1) ref_data =", "content.split('<p>', 1); splitted_page = splitted_page[1].split('<span class=\"mw-headline\" id=\"References\">References</span>', 1) art = splitted_page[0] art =", "= content2.split('name=\"wpTextbox1\">', 1) content2 = content2[1].split('</textarea>', 1) content2 = content2[0] content2 = content2.replace('&lt;',", "Odia # to write reference --BEGIN request2 = urllib2.Request(urls2) handle2 = urllib2.urlopen(request2) content2", "the name of article from english wikipedia: ') urls = \"\\n\\nhttps://en.wikipedia.org/wiki/\" + ArtName", "'<ref' + ref_data[0] + '/>' content2 = content2.replace(ref_data, '') ref_no = '[' +", "0 while i != ref: if '<ref' in content2: ref_dataa = content2.split('<ref', 1)", "+ ref_data[0] + '/>' content2 = content2.replace(ref_data, '') ref_no = '[' + str(i+1)", "-*- import urllib2 import re import goslate def trans(word): gs = goslate.Goslate() ro", "!= 100000: ArtName = raw_input('\\n\\ntype the name of 
article from english wikipedia: ')", "content: print '\\n\\ndata about ' + ArtName + ' found... working...\\n\\n' splitted_page =", "art.replace('</p>', '\\n') art = art.replace('&#160;', '') art = art.replace('<span class=\"mw-headline\"', '\\n==<') art =", "#translate the text in to Odia # to write reference --BEGIN request2 =", "urllib2 import re import goslate def trans(word): gs = goslate.Goslate() ro = gs.translate(word,", "reference -- END print \"'''\" + ArtName + \"'''\\n\" to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\\n')", "print to_print #to print text after reference as it is -- BEGIN as_it_is", "100000: ArtName = raw_input('\\n\\ntype the name of article from english wikipedia: ') urls", "'\\n\\ndata about ' + ArtName + ' found... working...\\n\\n' splitted_page = content.split('<p>', 1);", "= goslate.Goslate() ro = gs.translate(word, 'or') if ro == \"\": ro = word", "gs.translate(word, 'or') if ro == \"\": ro = word return ro i =", "splitted_page[1].split('<span class=\"mw-headline\" id=\"References\">References</span>', 1) art = splitted_page[0] art = art.replace('</p>', '\\n') art =", "content2 = content2.split('name=\"wpTextbox1\">', 1) content2 = content2[1].split('</textarea>', 1) content2 = content2[0] content2 =", "'') print '\\n==References==\\n', as_it_is #to print text after reference as it is --", "= as_it_is.replace('</p>', '\\n') as_it_is = as_it_is.replace('&#160;', '') print '\\n==References==\\n', as_it_is #to print text", "word return ro i = 3 while i != 100000: ArtName = raw_input('\\n\\ntype", "= urllib2.urlopen(request) content = handle.read() if '<table class=\"infobox' in content: print '\\n\\ndata about", "after reference as it is -- BEGIN as_it_is = content3.split('==References==', 1) as_it_is =", "request = urllib2.Request(urls) handle = urllib2.urlopen(request) content = handle.read() if '<table class=\"infobox' in", "1); splitted_page = splitted_page[1].split('<span class=\"mw-headline\" 
id=\"References\">References</span>', 1) art = splitted_page[0] art = art.replace('</p>',", "ref_data = '<ref' + ref_data + '</ref>' if '/>' in ref_data: ref_data =", "to_print = trans(art.decode('utf-8')) #translate the text in to Odia # to write reference", "raw_input('\\n\\ntype the name of article from english wikipedia: ') urls = \"\\n\\nhttps://en.wikipedia.org/wiki/\" +", "# to write reference --BEGIN request2 = urllib2.Request(urls2) handle2 = urllib2.urlopen(request2) content2 =", "handle.read() if '<table class=\"infobox' in content: print '\\n\\ndata about ' + ArtName +", "about ' + ArtName + ' found... working...\\n\\n' splitted_page = content.split('<p>', 1); splitted_page", "splitted_page = content.split('<p>', 1); splitted_page = splitted_page[1].split('<span class=\"mw-headline\" id=\"References\">References</span>', 1) art = splitted_page[0]", "ArtName urls2 = \"https://en.wikipedia.org/w/index.php?title=\" + ArtName + \"&action=edit\" print urls request = urllib2.Request(urls)", "in ref_data: ref_data = ref_dataa[1].split('/>', 1) ref_data = '<ref' + ref_data[0] + '/>'", "reference --BEGIN request2 = urllib2.Request(urls2) handle2 = urllib2.urlopen(request2) content2 = handle2.read() content2 =", "content2 = content2.replace('&lt;', '<') content3 = content2 ref = input('entre the no. 
of", "is -- BEGIN as_it_is = content3.split('==References==', 1) as_it_is = as_it_is[1] as_it_is = as_it_is.replace('</p>',", "ref_data = ref_dataa[1].split('/>', 1) ref_data = '<ref' + ref_data[0] + '/>' content2 =", "handle2.read() content2 = content2.split('name=\"wpTextbox1\">', 1) content2 = content2[1].split('</textarea>', 1) content2 = content2[0] content2", "art = art.replace('&#160;', '') art = art.replace('<span class=\"mw-headline\"', '\\n==<') art = re.sub('\\<.*?\\>','', art)", "trans(art.decode('utf-8')) #translate the text in to Odia # to write reference --BEGIN request2", "-- END print \"'''\" + ArtName + \"'''\\n\" to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\\n') print", "import goslate def trans(word): gs = goslate.Goslate() ro = gs.translate(word, 'or') if ro", "wikipedia: ') urls = \"\\n\\nhttps://en.wikipedia.org/wiki/\" + ArtName urls2 = \"https://en.wikipedia.org/w/index.php?title=\" + ArtName +", "print urls request = urllib2.Request(urls) handle = urllib2.urlopen(request) content = handle.read() if '<table", "of references: ') i = 0 while i != ref: if '<ref' in", "text after reference as it is -- END else: print 'escape', i+1 i+=1", "to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\\n') print to_print #to print text after reference as it", "print text after reference as it is -- BEGIN as_it_is = content3.split('==References==', 1)", "content = handle.read() if '<table class=\"infobox' in content: print '\\n\\ndata about ' +", "= '<ref' + ref_data + '</ref>' if '/>' in ref_data: ref_data = ref_dataa[1].split('/>',", "name of article from english wikipedia: ') urls = \"\\n\\nhttps://en.wikipedia.org/wiki/\" + ArtName urls2", "content2: ref_dataa = content2.split('<ref', 1) if '</ref>' in ref_dataa[1]: ref_data = ref_dataa[1].split('</ref>', 1)", "def trans(word): gs = goslate.Goslate() ro = gs.translate(word, 'or') if ro == \"\":", "= trans(art.decode('utf-8')) #translate the text in to Odia # to write reference 
--BEGIN", "1) content2 = content2[0] content2 = content2.replace('&lt;', '<') content3 = content2 ref =", "the no. of references: ') i = 0 while i != ref: if", "= art.replace('</p>', '\\n') art = art.replace('&#160;', '') art = art.replace('<span class=\"mw-headline\"', '\\n==<') art", "+ '</ref>' if '/>' in ref_data: ref_data = ref_dataa[1].split('/>', 1) ref_data = '<ref'", "') i = 0 while i != ref: if '<ref' in content2: ref_dataa", "print \"'''\" + ArtName + \"'''\\n\" to_print = to_print.replace('[ସମ୍ପାଦନା]'.decode('utf-8'), '==\\n') print to_print #to", "# -*- coding: utf-8 -*- import urllib2 import re import goslate def trans(word):", "= splitted_page[0] art = art.replace('</p>', '\\n') art = art.replace('&#160;', '') art = art.replace('<span", ".*?\\em\">','', art) to_print = trans(art.decode('utf-8')) #translate the text in to Odia # to", "= content2[0] content2 = content2.replace('&lt;', '<') content3 = content2 ref = input('entre the", "= \"\\n\\nhttps://en.wikipedia.org/wiki/\" + ArtName urls2 = \"https://en.wikipedia.org/w/index.php?title=\" + ArtName + \"&action=edit\" print urls", "art) to_print = trans(art.decode('utf-8')) #translate the text in to Odia # to write", "1) if '</ref>' in ref_dataa[1]: ref_data = ref_dataa[1].split('</ref>', 1) ref_data = ref_data[0] else:", "import urllib2 import re import goslate def trans(word): gs = goslate.Goslate() ro =", "'\\n==References==\\n', as_it_is #to print text after reference as it is -- END else:", "found... 
working...\\n\\n' splitted_page = content.split('<p>', 1); splitted_page = splitted_page[1].split('<span class=\"mw-headline\" id=\"References\">References</span>', 1) art", "urllib2.Request(urls2) handle2 = urllib2.urlopen(request2) content2 = handle2.read() content2 = content2.split('name=\"wpTextbox1\">', 1) content2 =", "ref_data[0] else: ref_data = '' ref_data = '<ref' + ref_data + '</ref>' if", "reference as it is -- BEGIN as_it_is = content3.split('==References==', 1) as_it_is = as_it_is[1]", "art) art = re.sub('\\ox .*?\\em\">','', art) to_print = trans(art.decode('utf-8')) #translate the text in", "ref_data: ref_data = ref_dataa[1].split('/>', 1) ref_data = '<ref' + ref_data[0] + '/>' content2", "-*- coding: utf-8 -*- import urllib2 import re import goslate def trans(word): gs", "'<') content3 = content2 ref = input('entre the no. of references: ') i", "of article from english wikipedia: ') urls = \"\\n\\nhttps://en.wikipedia.org/wiki/\" + ArtName urls2 =", "content2 = handle2.read() content2 = content2.split('name=\"wpTextbox1\">', 1) content2 = content2[1].split('</textarea>', 1) content2 =", "gs = goslate.Goslate() ro = gs.translate(word, 'or') if ro == \"\": ro =", "= content2.split('<ref', 1) if '</ref>' in ref_dataa[1]: ref_data = ref_dataa[1].split('</ref>', 1) ref_data =", "ref_data = ref_dataa[1].split('</ref>', 1) ref_data = ref_data[0] else: ref_data = '' ref_data =", "'[' + str(i+1) + ']' to_print = to_print.replace(ref_no.decode('utf-8'), ref_data.decode('utf-8')) i+=1 # to write", "to_print #to print text after reference as it is -- BEGIN as_it_is =", "+ \"&action=edit\" print urls request = urllib2.Request(urls) handle = urllib2.urlopen(request) content = handle.read()", "import re import goslate def trans(word): gs = goslate.Goslate() ro = gs.translate(word, 'or')", "= content.split('<p>', 1); splitted_page = splitted_page[1].split('<span class=\"mw-headline\" id=\"References\">References</span>', 1) art = splitted_page[0] art", 
"trans(word): gs = goslate.Goslate() ro = gs.translate(word, 'or') if ro == \"\": ro" ]
[ "/ (1.0 + 10 ** ((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float,", "pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2 = (1.0 / (1.0 + pow(10,", "((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2), (1 - isWinPlayer1 -", "-> float: P1 = (1.0 / (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) /", "bool) -> float: P1 = (1.0 / (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2)", "(1.0 / (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1 -", "ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) -> float: P1 = (1.0 / (1.0", "+ pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2), (1 -", "= (1.0 / (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1", "** ((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool)", "+ pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2 = (1.0 / (1.0 +", "rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) -> float: P1", "#return (isWinPlayer1 - P2), (1 - isWinPlayer1 - P1) return (isWinPlayer1 - P2)", "__future__ import annotations def GetWinningProbability(rating1: float, rating2: float): return 1.0 / (1.0 +", "ratingPlayer2: float, isWinPlayer1: bool) -> float: P1 = (1.0 / (1.0 + pow(10,", "P2 = (1.0 / (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return", "(1.0 + 10 ** ((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2:", "float: P1 = (1.0 / (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) / 400))))", "return 1.0 / (1.0 + 10 ** ((rating1 - rating2) / 400)) def", "import annotations def GetWinningProbability(rating1: float, rating2: float): return 1.0 / (1.0 + 10", "400)))) #return (isWinPlayer1 - P2), (1 - isWinPlayer1 - P1) return (isWinPlayer1 -", "float, rating2: float): return 1.0 / (1.0 + 10 ** ((rating1 - rating2)", 
"400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) -> float: P1 = (1.0", "10 ** ((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1:", "def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) -> float: P1 = (1.0 /", "pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2), (1 - isWinPlayer1", "((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) ->", "(1.0 / (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2 = (1.0", "- ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2), (1 - isWinPlayer1 - P1)", "P1 = (1.0 / (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2", "/ (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2),", "+ 10 ** ((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float,", "float, isWinPlayer1: bool) -> float: P1 = (1.0 / (1.0 + pow(10, ((ratingPlayer1", "- rating2) / 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) -> float:", "annotations def GetWinningProbability(rating1: float, rating2: float): return 1.0 / (1.0 + 10 **", "float, ratingPlayer2: float, isWinPlayer1: bool) -> float: P1 = (1.0 / (1.0 +", "GetWinningProbability(rating1: float, rating2: float): return 1.0 / (1.0 + 10 ** ((rating1 -", "float): return 1.0 / (1.0 + 10 ** ((rating1 - rating2) / 400))", "= (1.0 / (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2 =", "/ 400)) def ComputeDeltaRating(ratingPlayer1: float, ratingPlayer2: float, isWinPlayer1: bool) -> float: P1 =", "(1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2), (1", "rating2: float): return 1.0 / (1.0 + 10 ** ((rating1 - rating2) /", "/ 400)))) P2 = (1.0 / (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) /", "ratingPlayer2) / 
400)))) P2 = (1.0 / (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1)", "/ 400)))) #return (isWinPlayer1 - P2), (1 - isWinPlayer1 - P1) return (isWinPlayer1", "/ (1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2 = (1.0 /", "ratingPlayer1) / 400)))) #return (isWinPlayer1 - P2), (1 - isWinPlayer1 - P1) return", "(1.0 + pow(10, ((ratingPlayer1 - ratingPlayer2) / 400)))) P2 = (1.0 / (1.0", "- ratingPlayer2) / 400)))) P2 = (1.0 / (1.0 + pow(10, ((ratingPlayer2 -", "def GetWinningProbability(rating1: float, rating2: float): return 1.0 / (1.0 + 10 ** ((rating1", "1.0 / (1.0 + 10 ** ((rating1 - rating2) / 400)) def ComputeDeltaRating(ratingPlayer1:", "400)))) P2 = (1.0 / (1.0 + pow(10, ((ratingPlayer2 - ratingPlayer1) / 400))))", "((ratingPlayer1 - ratingPlayer2) / 400)))) P2 = (1.0 / (1.0 + pow(10, ((ratingPlayer2", "isWinPlayer1: bool) -> float: P1 = (1.0 / (1.0 + pow(10, ((ratingPlayer1 -", "from __future__ import annotations def GetWinningProbability(rating1: float, rating2: float): return 1.0 / (1.0" ]
[ ", before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] =", "= datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename", "+\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month", "f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON", "mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await", "# 特になし pass self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月) async def voice_outputlog(self,", ": # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から", ": f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode())", "m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text = \"【一か月定期連絡】\"+ m_year", "self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入", "= before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] =", ", CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if timeData is not None : send_fileName", "config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist =", "__init__(self) : super().__init__() self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row +", 
"から勝手に拝借 def append_json_to_file(self, _dict, path_file): try : with open(path_file, 'ab+') as f: #", "await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if timeData is not", "0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else : f.seek(-1,2) #", "= False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください", "discord.Member, before: discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict() if before.channel is None:", "flag : sendfile = await self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month =", "%H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが入りました\")", ", member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id", "return send_fileName else : return None #async def on_message(self, config, client: discord.Client, message:", "'01 00') else : # 1時に実行する flag = CTime.check('%M', '00') # -- 出力処理", "入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name , \"#\" ,", "base.time_check as CTime import os import collections as cl from datetime import datetime,", "channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return #await Sendtool.Send_ChannelID(client=client, channelID=channellist ,", "import datetime, timedelta from dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main as CSetting", "datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\"", "flag = False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False # ---", "%H', '01 00') 
else : # 1時に実行する flag = CTime.check('%M', '00') # --", "# 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する", "data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"]", "as base import base.DiscordSend as Sendtool import base.ColorPrint as CPrint import base.time_check as", "= \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] =", ", before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] =", "## 入退室監視 async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState ,", "async def voice_outputlog(self, config, client: discord.Client): channellist = [] if config.get(\"on_task\") is not", ": return None #async def on_message(self, config, client: discord.Client, message: discord.Message) : #sendfile", "message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async", "member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id", "await self.MonthOutput(client=client) #if sendfile is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else", "出力処理 -- if flag : sendfile = await self.MonthOutput(client=client) filetime = today -", "# ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open(", "data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = 
after.channel.name data[\"after.channel.id\"]", "data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"]", "Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if timeData is not None", "CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try : with", "m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath) ==", "data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"]", "+ CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return", "ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む", "#await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! - voice_outputlog\") ## -------- flag = False #", "JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try : with open(path_file, 'ab+')", "# ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から -1", "CSVで加工済みを保存する if timeData is not None : send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis +", "None : return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! 
- voice_outputlog\") ## -------- flag", "m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder", "'{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath)", "except OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() #", "None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借", "入退室監視 async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState , after:", "datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name , \"#\" , member.discriminator ,", "\"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id", "data = cl.OrderedDict() if before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\"", ": def __init__(self) : super().__init__() self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder +", "voice_outputlog\") ## -------- flag = False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag =", "if channellist is None : return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! 
- voice_outputlog\")", "ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath", "today = datetime.today() filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m')", "member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass self.append_json_to_file( data, self.now_filepath", ") # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client,", "with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID", "on_message(self, config, client: discord.Client, message: discord.Message) : #sendfile = await self.MonthOutput(client=client) #if sendfile", "False : # 1日に実行する flag = CTime.check('%d %H', '01 00') else : #", "channellist = [] if config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not", "Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData", "辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except OSError as e: CPrint.error_print( path_file", "self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\")", "配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except OSError as", "mode=\"NAME\") # CSVで加工済みを保存する if timeData is not None : send_fileName = CSetting.baseLogFolder +", "return None #async def on_message(self, config, client: discord.Client, message: discord.Message) : #sendfile =", "= before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id 
data[\"member.name\"] =", "not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if", "\"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name", "as CTime import os import collections as cl from datetime import datetime, timedelta", "data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"]", ", \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"]", "抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" , member.discriminator", "Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await", "os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData =", "elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" ,", "member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ##", "Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! 
- voice_outputlog\") ## -------- flag = False # 動作時間決定", "discord.Client, message: discord.Message) : #sendfile = await self.MonthOutput(client=client) #if sendfile is None :", "配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) #", "Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self, config, client: discord.Client, member:", "f.write(']'.encode()) # JSON 配列を閉じる except OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd())", "- relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year,", "# await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass", "# -- 出力処理 -- if flag : sendfile = await self.MonthOutput(client=client) filetime =", "as pd class command(base.command_base) : def __init__(self) : super().__init__() self.test_task: tasks.Loop = None", "= await self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year =", "relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename =", "00') else : # 1時に実行する flag = CTime.check('%M', '00') # -- 出力処理 --", ": sendfile = await self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m')", "CSetting.JSONPATH_row + month_filename + \".json\" if sendfile is None : text = \"【一か月定期連絡】\"+", "collections as cl from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta import", "f.close() # 連続で追加する場合は都度 Open, 
Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client):", "data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id !=", ": # 特になし pass self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月) async def", "+ \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き)", "is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\")", "文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を", "if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None", "Chart import pandas as pd class command(base.command_base) : def __init__(self) : super().__init__() self.test_task:", "print(e) return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self,", "def __init__(self) : super().__init__() self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row", "send_fileName else : return None #async def on_message(self, config, client: discord.Client, message: discord.Message)", "Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month", "TestFlag == False : # 1日に実行する flag = CTime.check('%d %H', '01 00') else", "async def MonthOutput(self, client: discord.Client): today = datetime.today() filetime = today - relativedelta(months=1)", "= datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != 
before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name,", "== False : # 1日に実行する flag = CTime.check('%d %H', '01 00') else :", "after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name", "datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row", ": super().__init__() self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now", "else : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID,", "else : # 1時に実行する flag = CTime.check('%M', '00') # -- 出力処理 -- if", "# 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except OSError as e: CPrint.error_print(", "CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if sendfile is None : text", "\".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename", "not None : send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv(", "+ month_filename + \".json\" if sendfile is None : text = \"【一か月定期連絡】\"+ m_year", "= member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\")", "-1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) #", "None : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID,", "data[\"member.id\"] = member.id 
data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass self.append_json_to_file(", "send_fileName ) return send_fileName else : return None #async def on_message(self, config, client:", "self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def", "month_filename + \".json\" if sendfile is None : text = \"【一か月定期連絡】\"+ m_year +", "channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\"", "#if sendfile is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : #", "base.DiscordSend as Sendtool import base.ColorPrint as CPrint import base.time_check as CTime import os", "member.name , \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name", "m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+", "import base.DiscordSend as Sendtool import base.ColorPrint as CPrint import base.time_check as CTime import", "\"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id", "# 定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client): channellist = [] if config.get(\"on_task\")", "\"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\"", "# Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename", "is not None : 
send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\"", "is None : return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! - voice_outputlog\") ## --------", "json import command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart import pandas as pd", "= datetime.today() filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year", "is None : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client,", ", '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる", ",\":\" , before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"]", "sendfile = await self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year", "import relativedelta import json import command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart import", "\"から\" , member.name , \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"]", "config, client: discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict()", "+ CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try :", "after: discord.VoiceState): data = cl.OrderedDict() if before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d", "data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"]", "形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except 
OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\")", "## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try : with open(path_file, 'ab+') as", ", after.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] =", "f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は", "'.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except", "%H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが移動しました\")", "- relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename", "= cl.OrderedDict() if before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" ,", "if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename )", "\"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator", "= [] if config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None", "指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False :", "'00') # -- 出力処理 -- if flag : sendfile = await self.MonthOutput(client=client) filetime", "message=\"TASKCheck! 
- voice_outputlog\") ## -------- flag = False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。", "super().__init__() self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now #", "from dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main as CSetting import command.voice_log.chart as", "await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ##", "= member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id", "if sendfile is None : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\"", "datetime, timedelta from dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main as CSetting import", "sendfile is None : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await", "e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open, Close", "CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName ) return send_fileName else", ", member.name , \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] =", ", member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\"", "しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today = datetime.today() filetime =", "CTime import os import collections as cl from datetime import datetime, timedelta from", "= config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return #await Sendtool.Send_ChannelID(client=client, 
channelID=channellist , message=\"TASKCheck!", "def append_json_to_file(self, _dict, path_file): try : with open(path_file, 'ab+') as f: # ファイルを開く", "self.now_filepath ) pass # 定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client): channellist =", "--- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False : # 1日に実行する flag = CTime.check('%d", "# 連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today", "= None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913", "※ 指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False", "+ CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file):", "False: # ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with", "\".json\" if sendfile is None : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month", "tasks.Loop = None self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ##", "= before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] =", "if flag : sendfile = await self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month", "= \"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] =", "None # Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass #", "_dict, path_file): try : with open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2) #", "+ \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await 
Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year", "data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\"", "path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも #", ": send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName )", "= \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=sendfile) pass", "cl from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta import json import", "OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度", "is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message,", "pandas as pd class command(base.command_base) : def __init__(self) : super().__init__() self.test_task: tasks.Loop =", "+ month_filename + \".csv\" timeData.to_csv( send_fileName ) return send_fileName else : return None", "= await self.MonthOutput(client=client) #if sendfile is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None)", "as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open,", "+ \".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath,", ": text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text,", "self.MonthOutput(client=client) #if 
sendfile is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else :", "# --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False : # 1日に実行する flag =", "import command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart import pandas as pd class", ": # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile)", "message=text, filename=None) else : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await", "= CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if sendfile is None :", "command.voice_log.chart as Chart import pandas as pd class command(base.command_base) : def __init__(self) :", "CPrint import base.time_check as CTime import os import collections as cl from datetime", "mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath) == False:", "discord.Message) : #sendfile = await self.MonthOutput(client=client) #if sendfile is None : # await", "= \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] =", "+ \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text =", "= '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if", "send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName ) return", "self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client):", "CPrint.error_print( path_file + 
\"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも", "None : send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName", "config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None :", "= member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif", "if config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist", "command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart import pandas as pd class command(base.command_base)", "timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if timeData", "dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart", "# 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except OSError", "# Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る", "CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None", "#else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self,", "before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator", "member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" 
data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"]", "+\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text = \"【一か月定期連絡】\"+ m_year +", "data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : #", ", after: discord.VoiceState): data = cl.OrderedDict() if before.channel is None: ## 入ってきたら print(", ": return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! - voice_outputlog\") ## -------- flag =", "= CTime.check('%M', '00') # -- 出力処理 -- if flag : sendfile = await", "CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict,", ", \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] =", "特になし pass self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月) async def voice_outputlog(self, config,", "\"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=sendfile) pass pass", "= after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] =", "CTime.check('%M', '00') # -- 出力処理 -- if flag : sendfile = await self.MonthOutput(client=client)", "= \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else", "timeData is not None : send_fileName = 
CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename +", "== False: # ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく", "else : return None #async def on_message(self, config, client: discord.Client, message: discord.Message) :", "not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return #await", "append_json_to_file(self, _dict, path_file): try : with open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2)", "data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"]", "datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass self.append_json_to_file( data, self.now_filepath ) pass #", ", message=\"TASKCheck! - voice_outputlog\") ## -------- flag = False # 動作時間決定 # ※", "Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename =", "base import base.DiscordSend as Sendtool import base.ColorPrint as CPrint import base.time_check as CTime", "\"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] =", "path_file): try : with open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0)", "text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None)", "text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=sendfile)", "today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = 
datetime.strftime(filetime,'%Y') month_filename =", "discord from discord.ext import tasks import base.command_base as base import base.DiscordSend as Sendtool", "空の場合は JSON 配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON", "#async def on_message(self, config, client: discord.Client, message: discord.Message) : #sendfile = await self.MonthOutput(client=client)", "None #async def on_message(self, config, client: discord.Client, message: discord.Message) : #sendfile = await", "# JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try : with open(path_file,", "datetime.today() filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year =", "before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator", "client: discord.Client): channellist = [] if config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name)", "= before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] =", ": text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text,", "os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None # Rename os.rename(self.now_filepath, mv_filename ) #", "import base.time_check as CTime import os import collections as cl from datetime import", "f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) #", "\"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name 
data[\"after.channel.id\"] = after.channel.id", "open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0", "is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name ,", "動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag", "= after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] =", "m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row +", "JSON 配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除)", "= datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass self.append_json_to_file( data, self.now_filepath ) pass", "\"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name", "as cl from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta import json", "config, client: discord.Client, message: discord.Message) : #sendfile = await self.MonthOutput(client=client) #if sendfile is", "def on_message(self, config, client: discord.Client, message: discord.Message) : #sendfile = await self.MonthOutput(client=client) #if", "= datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" ,", "f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : # ファイルが空かチェック", "data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\")", ") return send_fileName else : return None #async def on_message(self, 
config, client: discord.Client,", "# ※ 指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag ==", "else : f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' ,", "if before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\"", "CTime.check('%d %H', '01 00') else : # 1時に実行する flag = CTime.check('%M', '00') #", "None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist", "== 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else : f.seek(-1,2)", "channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await Sendtool.Send_ChannelID(client=client,", ": with open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell()", "member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel", "m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text", "Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today = datetime.today() filetime", "print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name , \"#\" , member.discriminator", "\"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] =", "%H:%M:%S\") else : # 特になし pass self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月)", ",\":\" , 
before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"]", "timedelta from dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main as CSetting import command.voice_log.chart", "data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"]", "member.name , \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name", ", member.name , \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] =", "return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! - voice_outputlog\") ## -------- flag = False", "async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState):", "datetime import datetime, timedelta from dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main as", "before: discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict() if before.channel is None: ##", "before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" ,", "member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\")", "\"から\" , member.name , \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"]", "data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ## 抜けたら", ", \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id 
data[\"after.channel.name\"] =", "await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text = \"【一か月定期連絡】\"+ m_year + \"年\"+", "\"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text =", "as Sendtool import base.ColorPrint as CPrint import base.time_check as CTime import os import", "await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\"", ", \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] =", "after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id", ", \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"]", "self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename", "配列を閉じる except OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close()", "\"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] =", "discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict() if before.channel", "data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\" 
data[\"member.name\"]", "def voice_outputlog(self, config, client: discord.Client): channellist = [] if config.get(\"on_task\") is not None", "tasks import base.command_base as base import base.DiscordSend as Sendtool import base.ColorPrint as CPrint", "= today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename", "f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except OSError as e:", "%H:%M:%S\") elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\"", "month_filename + \".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None # Rename", "# CSVで加工済みを保存する if timeData is not None : send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis", "= \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] =", "# 1時に実行する flag = CTime.check('%M', '00') # -- 出力処理 -- if flag :", "return None # Rename os.rename(self.now_filepath, mv_filename ) # now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass", "command(base.command_base) : def __init__(self) : super().__init__() self.test_task: tasks.Loop = None self.now_filepath = CSetting.baseLogFolder", "sys import discord from discord.ext import tasks import base.command_base as base import base.DiscordSend", "member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else :", "data, self.now_filepath ) pass # 定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client): channellist", "voice_outputlog(self, config, client: discord.Client): channellist = [] if config.get(\"on_task\") is not None :", "channellist is None : 
return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! - voice_outputlog\") ##", "- voice_outputlog\") ## -------- flag = False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag", "def MonthOutput(self, client: discord.Client): today = datetime.today() filetime = today - relativedelta(months=1) #", "JSON 配列を閉じる except OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e) return", "member: discord.Member, before: discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict() if before.channel is", "data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass self.append_json_to_file( data, self.now_filepath )", "pd class command(base.command_base) : def __init__(self) : super().__init__() self.test_task: tasks.Loop = None self.now_filepath", "CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if timeData is not None : send_fileName =", "= member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass self.append_json_to_file( data,", "-------------- if TestFlag == False : # 1日に実行する flag = CTime.check('%d %H', '01", "print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" , member.discriminator ,", "連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today =", "data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" ,", "# JSON 配列を閉じる except OSError as e: CPrint.error_print( path_file + \"が、存在しませんでした\") print(os.getcwd()) print(e)", "today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month)", "filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", 
filename=sendfile) #pass ## 入退室監視 async def", "message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID,", "if timeData is not None : send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename", "data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d", "as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : #", "定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client): channellist = [] if config.get(\"on_task\") is", "data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"]", "+ \".csv\" timeData.to_csv( send_fileName ) return send_fileName else : return None #async def", "member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"]", "False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False : # 1日に実行する flag", "as CSetting import command.voice_log.chart as Chart import pandas as pd class command(base.command_base) :", "after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d", "# 1日に実行する flag = CTime.check('%d %H', '01 00') else : # 1時に実行する flag", "m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if sendfile is", "from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta import json import command.voice_log.Config_Main", 
"%H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが抜けました\")", "client: discord.Client, message: discord.Message) : #sendfile = await self.MonthOutput(client=client) #if sendfile is None", "filetime = today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename =", "base.ColorPrint as CPrint import base.time_check as CTime import os import collections as cl", "= \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] =", "f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate()", "message: discord.Message) : #sendfile = await self.MonthOutput(client=client) #if sendfile is None : #", "await self.MonthOutput(client=client) filetime = today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y')", "member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし pass", ", \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"]", "def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState): data", "is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name", "data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"]", "before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" 
data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name", "\"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year +", "filename=None) else : text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client,", "= \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] =", "https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try : with open(path_file, 'ab+') as f:", "= member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None:", "[] if config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None :", "data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\" data[\"after.channel.id\"] = \"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"]", "now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename ,", "else : # 特になし pass self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月) async", "after.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\"", "1日に実行する flag = CTime.check('%d %H', '01 00') else : # 1時に実行する flag =", "配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write('", "text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename)", "as CPrint import base.time_check as CTime import os import 
collections as cl from", "1時に実行する flag = CTime.check('%M', '00') # -- 出力処理 -- if flag : sendfile", "# 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み", "import pandas as pd class command(base.command_base) : def __init__(self) : super().__init__() self.test_task: tasks.Loop", ": # 1時に実行する flag = CTime.check('%M', '00') # -- 出力処理 -- if flag", "timeData.to_csv( send_fileName ) return send_fileName else : return None #async def on_message(self, config,", "= await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if timeData is", "return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client:", "pass self.append_json_to_file( data, self.now_filepath ) pass # 定期送信(1ヵ月) async def voice_outputlog(self, config, client:", "JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON 配列を閉じる except OSError as e: CPrint.error_print( path_file +", "after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name ,", "relativedelta import json import command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart import pandas", "discord.Client): channellist = [] if config.get(\"on_task\") is not None : if config[\"on_task\"].get(sys._getframe().f_code.co_name) is", "= today - relativedelta(months=1) m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year,", "= member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else : # 特になし", ": # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self, config,", "elif after.channel.id != before.channel.id : 
print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name", "sendfile is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await", "\"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else :", "filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before:", ": # 1日に実行する flag = CTime.check('%d %H', '01 00') else : # 1時に実行する", ", member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id", "before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが移動しました\") data[\"Flag\"] = \"move\"", "## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name , \"#\"", "CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く", "import base.command_base as base import base.DiscordSend as Sendtool import base.ColorPrint as CPrint import", "f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode()) # JSON", "#pass ## 入退室監視 async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState", "class command(base.command_base) : def __init__(self) : super().__init__() self.test_task: tasks.Loop = None self.now_filepath =", "# ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON", "ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else : f.seek(-1,2) # 
ファイルの末尾(2)から -1 文字移動", "!= before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\"", "discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict() if before.channel is None: ## 入ってきたら", "+ CSetting.JSONPATH_row + month_filename + \".json\" if sendfile is None : text =", "base.command_base as base import base.DiscordSend as Sendtool import base.ColorPrint as CPrint import base.time_check", "ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode())", "= member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") else", "before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" ,", "+ \".json\" if sendfile is None : text = \"【一か月定期連絡】\"+ m_year + \"年\"+", "= \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text", ",\":\" , after.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"]", ": #sendfile = await self.MonthOutput(client=client) #if sendfile is None : # await Sendtool.Send_Member(Data=message,", "import command.voice_log.chart as Chart import pandas as pd class command(base.command_base) : def __init__(self)", "CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self, _dict, path_file): try", "%H:%M:%S\") elif after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" ,", "# 空の場合は JSON 配列を書き込む else : f.seek(-1,2) # ファイルの末尾(2)から -1 文字移動 f.truncate() #", 
"data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is", "client: discord.Client): today = datetime.today() filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month", "+ month_filename + \".json\" if os.path.exists(self.now_filepath) == False: # ここにエラー文を出して置く return None #", "-- 出力処理 -- if flag : sendfile = await self.MonthOutput(client=client) filetime = today", "加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # CSVで加工済みを保存する if", "member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"]", "Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today = datetime.today()", ", member.name , \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] =", "os import collections as cl from datetime import datetime, timedelta from dateutil.relativedelta import", ": channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return #await Sendtool.Send_ChannelID(client=client, channelID=channellist", "定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False : # 1日に実行する flag = CTime.check('%d %H',", "-------- flag = False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False #", "discord.Client): today = datetime.today() filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month =", ", mode=\"NAME\") # CSVで加工済みを保存する if timeData is not None : send_fileName = CSetting.baseLogFolder", "if f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else", "\"NULL\" data[\"member.name\"] = member.name data[\"member.discriminator\"] = 
member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d", "datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name,", "filename=mv_filename) text = \"【一か月定期連絡】\"+ m_year + \"年\"+ m_month +\"月の音声チャンネルログイン加工データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text,", "# await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self, config, client:", "= \"NULL\" data[\"before.channel.id\"] = \"NULL\" data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] =", "TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False : #", "\"年\"+ m_month +\"月の音声チャンネルログインはありませんでした\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None) else : text = \"【一か月定期連絡】\"+", "+ CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName ) return send_fileName else :", "# ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode())", "## -------- flag = False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False", "month_filename + \".csv\" timeData.to_csv( send_fileName ) return send_fileName else : return None #async", "import os import collections as cl from datetime import datetime, timedelta from dateutil.relativedelta", "on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState): data =", "m_month +\"月の音声チャンネルログイン生データ\" await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename) text = \"【一か月定期連絡】\"+ 
m_year + \"年\"+", "f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # 空の場合は JSON 配列を書き込む else :", "month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\"", "Sendtool import base.ColorPrint as CPrint import base.time_check as CTime import os import collections", "# 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if", "最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # 辞書を JSON 形式でダンプ書き込み f.write(']'.encode())", "member.name , \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"] = \"NULL\"", "= CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now # JSON記入 ## https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913 から勝手に拝借 def append_json_to_file(self,", "\".csv\" timeData.to_csv( send_fileName ) return send_fileName else : return None #async def on_message(self,", "message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member,", "await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視 async def on_voice_state_update(self, config, client: discord.Client,", "import json import command.voice_log.Config_Main as CSetting import command.voice_log.chart as Chart import pandas as", "None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\",", "with open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() ==", "from discord.ext import tasks import base.command_base as base import base.DiscordSend as Sendtool import", "discord.VoiceState): data = cl.OrderedDict() if 
before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\")", "pass # 定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client): channellist = [] if", "'ab+') as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 :", "try : with open(path_file, 'ab+') as f: # ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if", "open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID ,", "CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName ) return send_fileName else : return", "is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return", "\"から\" , member.name , \"#\" , member.discriminator , \"さんが入りました\") data[\"Flag\"] = \"entry\" data[\"before.channel.name\"]", "None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return #await Sendtool.Send_ChannelID(client=client,", "member.discriminator data[\"member.id\"] = member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id :", "member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel.id != before.channel.id : print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\"", "mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if sendfile is None", "\"さんが移動しました\") data[\"Flag\"] = \"move\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name", "JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today = datetime.today() filetime = today -", "False # 動作時間決定 # ※ 指定日時に動作できないので、これで代用。 TestFlag = False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください --------------", "#sendfile 
= await self.MonthOutput(client=client) #if sendfile is None : # await Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\",", "None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name, \"から\" , member.name ,", "data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name data[\"member.discriminator\"] = member.discriminator data[\"member.id\"] = member.id data[\"time\"]", "= CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if os.path.exists(self.now_filepath) == False: #", "cl.OrderedDict() if before.channel is None: ## 入ってきたら print( datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , after.channel.name,", "import tasks import base.command_base as base import base.DiscordSend as Sendtool import base.ColorPrint as", "import discord from discord.ext import tasks import base.command_base as base import base.DiscordSend as", "None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\"", "Sendtool.Send_Member(Data=message, message=\"ログファイルがありませんでした。\", filename=None) #else : # await Sendtool.Send_Member(Data=message, message=\"MonthOutput!\", filename=sendfile) #pass ## 入退室監視", "= member.id data[\"time\"] = datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") elif after.channel is None: ## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d", "channelID=channellist , message=\"TASKCheck! - voice_outputlog\") ## -------- flag = False # 動作時間決定 #", "ファイルを開く f.seek(0,2) # ファイルの末尾(2)に移動(フォフセット0) if f.tell() == 0 : # ファイルが空かチェック f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) #", "config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is None : return #await Sendtool.Send_ChannelID(client=client, channelID=channellist , message=\"TASKCheck! 
-", ": if config[\"on_task\"].get(sys._getframe().f_code.co_name) is not None : channellist = config[\"on_task\"][sys._getframe().f_code.co_name].get(\"message-channelID\") if channellist is", "-- if flag : sendfile = await self.MonthOutput(client=client) filetime = today - relativedelta(months=1)", "\"が、存在しませんでした\") print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async", "relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month)", "# ファイルの末尾(2)から -1 文字移動 f.truncate() # 最後の文字を削除し、JSON 配列を開ける(]の削除) f.write(' , '.encode()) # 配列のセパレーターを書き込む", "import collections as cl from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta", "import sys import discord from discord.ext import tasks import base.command_base as base import", "# now生ログファイルを、空作成しておく with open( self.now_filepath ,\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename", "before.channel.name, \"から\" , member.name , \"#\" , member.discriminator , \"さんが抜けました\") data[\"Flag\"] = \"exit\"", "as Chart import pandas as pd class command(base.command_base) : def __init__(self) : super().__init__()", "discord.ext import tasks import base.command_base as base import base.DiscordSend as Sendtool import base.ColorPrint", "flag = CTime.check('%d %H', '01 00') else : # 1時に実行する flag = CTime.check('%M',", "if TestFlag == False : # 1日に実行する flag = CTime.check('%d %H', '01 00')", "flag = CTime.check('%M', '00') # -- 出力処理 -- if flag : sendfile =", ": print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" , member.discriminator", "= CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + \".csv\" timeData.to_csv( send_fileName ) return send_fileName", "mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") # 
CSVで加工済みを保存する if timeData is not None :", "= CTime.check('%d %H', '01 00') else : # 1時に実行する flag = CTime.check('%M', '00')", "= False # --- 定期実行のプログラムテスト以外では、これは、Falseにしてください -------------- if TestFlag == False : # 1日に実行する", "CSetting import command.voice_log.chart as Chart import pandas as pd class command(base.command_base) : def", "before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = after.channel.name data[\"after.channel.id\"] = after.channel.id data[\"member.name\"] = member.name", ",\"w\"):pass # 加工済みデータを作る timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode=\"NAME\") #", "config, client: discord.Client): channellist = [] if config.get(\"on_task\") is not None : if", "import base.ColorPrint as CPrint import base.time_check as CTime import os import collections as", "\"さんが抜けました\") data[\"Flag\"] = \"exit\" data[\"before.channel.name\"] = before.channel.name data[\"before.channel.id\"] = before.channel.id data[\"after.channel.name\"] = \"NULL\"", "## 抜けたら print(datetime.now().strftime(\"%Y/%m/%d %H:%M:%S\") ,\":\" , before.channel.name, \"から\" , member.name , \"#\" ,", "print(os.getcwd()) print(e) return f.close() # 連続で追加する場合は都度 Open, Close しない方がいいかも # JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def", "= datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder +", "client: discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState): data = cl.OrderedDict() if", "# JSON出力(1ヵ月定期・ファイルチェンジ機能付き) async def MonthOutput(self, client: discord.Client): today = datetime.today() filetime = today", "MonthOutput(self, client: discord.Client): today = datetime.today() filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する", ") pass # 定期送信(1ヵ月) async def voice_outputlog(self, config, client: discord.Client): channellist = []", 
"'{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + \".json\" if sendfile", "filetime = today - relativedelta(months=1) # Renameするときのファイル名を決定する m_month = datetime.strftime(filetime,'%m') m_year = datetime.strftime(filetime,'%Y')", "datetime.strftime(filetime,'%Y') month_filename = '{0}{1}'.format(m_year, m_month) mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename +" ]
[ "docstring \"\"\" import json from fastapi import APIRouter from app.routers.files import get_document_path from", "document_file: document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name:", "@router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name: str, document_data: list ) -> Message:", "import json from fastapi import APIRouter from app.routers.files import get_document_path from app.utils.message import", "list ) -> Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE", "json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"r\")", "save_document_content( workspace_name: str, document_name: str, document_data: list ) -> Message: \"\"\"TODO function docstring\"\"\"", "document_name: str) -> json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE", "with open(path, \"r\") as document_file: document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async def", "function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"r\") as document_file:", "/ DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4) return Message( status=MsgStatus.INFO,", "str, document_name: str) -> json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) /", "module docstring \"\"\" import json from fastapi import APIRouter from app.routers.files import get_document_path", "document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name: str,", "str, 
document_data: list ) -> Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name)", "str, document_name: str, document_data: list ) -> Message: \"\"\"TODO function docstring\"\"\" path =", "def save_document_content( workspace_name: str, document_name: str, document_data: list ) -> Message: \"\"\"TODO function", "app.routers.files import get_document_path from app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"])", "def load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name,", "workspace_name: str, document_name: str, document_data: list ) -> Message: \"\"\"TODO function docstring\"\"\" path", "load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name)", "\"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"r\") as", "APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str)", "app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\")", "= \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO function", "document_name: str, document_data: list ) -> Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name,", "\"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO function docstring\"\"\"", 
"@router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO function docstring\"\"\" path", "async def load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO function docstring\"\"\" path =", "open(path, \"r\") as document_file: document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content(", "get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4) return", "from app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\"", "fastapi import APIRouter from app.routers.files import get_document_path from app.utils.message import Message, MsgStatus router", "import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async", "from fastapi import APIRouter from app.routers.files import get_document_path from app.utils.message import Message, MsgStatus", "Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str) -> json:", "document_name) / DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4) return Message(", "= get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"r\") as document_file: document_data = json.load(document_file)", "json from fastapi import APIRouter from app.routers.files import get_document_path from app.utils.message import Message,", "APIRouter from app.routers.files import get_document_path from app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\",", "\"\"\" import json from 
fastapi import APIRouter from app.routers.files import get_document_path from app.utils.message", "str) -> json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with", "<filename>backend/app/routers/documents.py \"\"\" TODO module docstring \"\"\" import json from fastapi import APIRouter from", "Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\")", "MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name:", "document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name: str, document_data: list ) ->", "DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str) -> json: \"\"\"TODO", "function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\") as document_file:", "path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data, document_file,", "path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"r\") as document_file: document_data =", "return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name: str, document_data: list )", "import APIRouter from app.routers.files import get_document_path from app.utils.message import Message, MsgStatus router =", "DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4) return Message( status=MsgStatus.INFO, detail=\"Document", "/ DOCUMENT_FILE with open(path, \"r\") as document_file: document_data = json.load(document_file) 
return document_data @router.post(\"/{document_name}\")", "document_name) / DOCUMENT_FILE with open(path, \"r\") as document_file: document_data = json.load(document_file) return document_data", "Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def", "\"w\") as document_file: json.dump(document_data, document_file, indent=4) return Message( status=MsgStatus.INFO, detail=\"Document content updated successfully\"", "= json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name: str, document_data:", "DOCUMENT_FILE with open(path, \"r\") as document_file: document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async", "open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4) return Message( status=MsgStatus.INFO, detail=\"Document content updated", "async def save_document_content( workspace_name: str, document_name: str, document_data: list ) -> Message: \"\"\"TODO", "= get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4)", "\"r\") as document_file: document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name:", "from app.routers.files import get_document_path from app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document", "\"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\") as", "\"\"\" TODO module docstring \"\"\" import json from fastapi import APIRouter from app.routers.files", "docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with 
open(path, \"r\") as document_file: document_data", "-> Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path,", "docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"w\") as document_file: json.dump(document_data,", "with open(path, \"w\") as document_file: json.dump(document_data, document_file, indent=4) return Message( status=MsgStatus.INFO, detail=\"Document content", "-> json: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path,", "tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name: str) ->", "as document_file: document_data = json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str,", "get_document_path from app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE =", "json.load(document_file) return document_data @router.post(\"/{document_name}\") async def save_document_content( workspace_name: str, document_name: str, document_data: list", "TODO module docstring \"\"\" import json from fastapi import APIRouter from app.routers.files import", "document_data: list ) -> Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) /", ") -> Message: \"\"\"TODO function docstring\"\"\" path = get_document_path(workspace_name, document_name) / DOCUMENT_FILE with", "import get_document_path from app.utils.message import Message, MsgStatus router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE", "as document_file: json.dump(document_data, document_file, indent=4) return Message( status=MsgStatus.INFO, detail=\"Document 
content updated successfully\" )", "get_document_path(workspace_name, document_name) / DOCUMENT_FILE with open(path, \"r\") as document_file: document_data = json.load(document_file) return", "router = APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str,", "= APIRouter(prefix=\"/api/document\", tags=[\"Document Management\"]) DOCUMENT_FILE = \"document.json\" @router.get(\"/{document_name}\") async def load_document_content(workspace_name: str, document_name:" ]
[ "ans = chr(carry % 2 + ord('0')) + ans carry //= 2 return", "str) -> str: carry, ans = 0 , '' for i in range(max(len(a),", "= \"Bannings\" class Solution: def addBinary(self, a: str, b: str) -> str: carry,", "- ord('0') if i < len(b) else 0 ans = chr(carry % 2", "b: str) -> str: carry, ans = 0 , '' for i in", "# -*-coding:utf-8-*- __author__ = \"Bannings\" class Solution: def addBinary(self, a: str, b: str)", "\"Bannings\" class Solution: def addBinary(self, a: str, b: str) -> str: carry, ans", "= 0 , '' for i in range(max(len(a), len(b))): carry += ord(a[len(a) -", "__name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") == \"100\" assert Solution().addBinary(\"1010\", \"1011\") == \"10101\"", "carry, ans = 0 , '' for i in range(max(len(a), len(b))): carry +=", "carry == 0 else '1' + ans if __name__ == '__main__': assert Solution().addBinary(\"11\",", "carry += ord(a[len(a) - i - 1]) - ord('0') if i < len(a)", "-*-coding:utf-8-*- __author__ = \"Bannings\" class Solution: def addBinary(self, a: str, b: str) ->", "1]) - ord('0') if i < len(b) else 0 ans = chr(carry %", "#!/usr/bin/python3 # -*-coding:utf-8-*- __author__ = \"Bannings\" class Solution: def addBinary(self, a: str, b:", "len(b))): carry += ord(a[len(a) - i - 1]) - ord('0') if i <", "for i in range(max(len(a), len(b))): carry += ord(a[len(a) - i - 1]) -", "- ord('0') if i < len(a) else 0 carry += ord(b[len(b) - i", "ord('0')) + ans carry //= 2 return ans if carry == 0 else", "+ ans if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") == \"100\" assert Solution().addBinary(\"1010\",", "i < len(a) else 0 carry += ord(b[len(b) - i - 1]) -", "<gh_stars>1-10 #!/usr/bin/python3 # -*-coding:utf-8-*- __author__ = \"Bannings\" class Solution: def addBinary(self, a: str,", "0 else '1' + ans if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") ==", "- i - 1]) - ord('0') if i < len(b) else 0 ans", "if i < len(b) else 0 ans = chr(carry % 2 + ord('0'))", "__author__ = 
\"Bannings\" class Solution: def addBinary(self, a: str, b: str) -> str:", "0 ans = chr(carry % 2 + ord('0')) + ans carry //= 2", "i < len(b) else 0 ans = chr(carry % 2 + ord('0')) +", "= chr(carry % 2 + ord('0')) + ans carry //= 2 return ans", "ans if carry == 0 else '1' + ans if __name__ == '__main__':", "2 return ans if carry == 0 else '1' + ans if __name__", "str: carry, ans = 0 , '' for i in range(max(len(a), len(b))): carry", "else 0 carry += ord(b[len(b) - i - 1]) - ord('0') if i", ", '' for i in range(max(len(a), len(b))): carry += ord(a[len(a) - i -", "0 , '' for i in range(max(len(a), len(b))): carry += ord(a[len(a) - i", "- 1]) - ord('0') if i < len(a) else 0 carry += ord(b[len(b)", "i - 1]) - ord('0') if i < len(a) else 0 carry +=", "2 + ord('0')) + ans carry //= 2 return ans if carry ==", "range(max(len(a), len(b))): carry += ord(a[len(a) - i - 1]) - ord('0') if i", "ord(b[len(b) - i - 1]) - ord('0') if i < len(b) else 0", "def addBinary(self, a: str, b: str) -> str: carry, ans = 0 ,", "+ ord('0')) + ans carry //= 2 return ans if carry == 0", "-> str: carry, ans = 0 , '' for i in range(max(len(a), len(b))):", "- 1]) - ord('0') if i < len(b) else 0 ans = chr(carry", "chr(carry % 2 + ord('0')) + ans carry //= 2 return ans if", "+= ord(a[len(a) - i - 1]) - ord('0') if i < len(a) else", "return ans if carry == 0 else '1' + ans if __name__ ==", "+= ord(b[len(b) - i - 1]) - ord('0') if i < len(b) else", "if i < len(a) else 0 carry += ord(b[len(b) - i - 1])", "in range(max(len(a), len(b))): carry += ord(a[len(a) - i - 1]) - ord('0') if", "len(a) else 0 carry += ord(b[len(b) - i - 1]) - ord('0') if", "ord(a[len(a) - i - 1]) - ord('0') if i < len(a) else 0", "'' for i in range(max(len(a), len(b))): carry += ord(a[len(a) - i - 1])", "class Solution: def addBinary(self, a: str, b: str) -> str: carry, ans =", "carry //= 2 return ans if carry == 0 else '1' + ans", "ord('0') if i < len(b) else 0 ans = chr(carry % 2 +", "//= 2 return ans if carry == 0 else '1' + ans 
if", "< len(a) else 0 carry += ord(b[len(b) - i - 1]) - ord('0')", "if carry == 0 else '1' + ans if __name__ == '__main__': assert", "str, b: str) -> str: carry, ans = 0 , '' for i", "i - 1]) - ord('0') if i < len(b) else 0 ans =", "i in range(max(len(a), len(b))): carry += ord(a[len(a) - i - 1]) - ord('0')", "Solution: def addBinary(self, a: str, b: str) -> str: carry, ans = 0", "1]) - ord('0') if i < len(a) else 0 carry += ord(b[len(b) -", "if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") == \"100\" assert Solution().addBinary(\"1010\", \"1011\") ==", "- i - 1]) - ord('0') if i < len(a) else 0 carry", "addBinary(self, a: str, b: str) -> str: carry, ans = 0 , ''", "carry += ord(b[len(b) - i - 1]) - ord('0') if i < len(b)", "+ ans carry //= 2 return ans if carry == 0 else '1'", "else '1' + ans if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") == \"100\"", "else 0 ans = chr(carry % 2 + ord('0')) + ans carry //=", "a: str, b: str) -> str: carry, ans = 0 , '' for", "ans = 0 , '' for i in range(max(len(a), len(b))): carry += ord(a[len(a)", "ans carry //= 2 return ans if carry == 0 else '1' +", "% 2 + ord('0')) + ans carry //= 2 return ans if carry", "< len(b) else 0 ans = chr(carry % 2 + ord('0')) + ans", "0 carry += ord(b[len(b) - i - 1]) - ord('0') if i <", "== 0 else '1' + ans if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\")", "'1' + ans if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") == \"100\" assert", "len(b) else 0 ans = chr(carry % 2 + ord('0')) + ans carry", "ord('0') if i < len(a) else 0 carry += ord(b[len(b) - i -", "ans if __name__ == '__main__': assert Solution().addBinary(\"11\", \"1\") == \"100\" assert Solution().addBinary(\"1010\", \"1011\")" ]
[]
[ "of cards currently in the dealer's hand. If the dealer currently has a", "return round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method settles any bets at the", "round_complete else: round_complete = True bet_amount = player_hand.get_bet() if self.is_natural() and not player_hand.is_natural():", "whether card is added to the hand face-up or face-down. By default, the", "resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method automatically resolves the dealer's hand: drawing", "for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities", "method is called against a dealer's hand object. Where None, the orientation of", "with all cards face-up: returns a list of integers. For hands with any", "added to the hand. Once a player decides to 'stand' at their hand's", "cards until the hand value exceeds seventeen. Method initially checks the dealer's hand", "This module exports the 'Hand' class, 'PlayerHand' and 'DealerHand' subclasses, and related methods.", "self._bust def is_natural(self): \"\"\" As a boolean, returns 'natural' status of hand (2", "because: the first card dealt to the dealer will always be dealt face-down;", "self.print_hand() def __len__(self): \"\"\"Allows len() to be used on hand objects, returning the", "player gets a natural and the dealer did not, they are immediately paid", "and related methods. \"\"\" import time draw_delay = 1 # The pause in", "deletes this card from the deck. If the 'face_dir' input argument requires the", "being initialised. The name of this player is queried and set used to", "0: hand_value_list = [ str(value) + \" + *-*\" * face_down_count for value", "list of int / str A list containing all possible values the hand's", "object for a given participant. Parameters ---------- holder_name : str Defines the owner,", "bust and updates all hand statuses accordingly. 
Parameters ---------- deck_obj : blackjack.deck.Deck The", "blackjack.player.Player The player object that owns the hand being initialised. The name of", "status communicates whether the hand is still active in the current round self._bust", "of face_dir not spelling 'up' (case-insensitive) will add the card face-down. Raises ------", "hand's constituent cards. If no hand value <= 21, 'best_value' = None. \"\"\"", "sums face-up cards that aren't an ace for card in self: # Try", "attribute). \"\"\" for card in self._live_hand: yield card def __repr__(self): \"\"\" Entering the", "in the current round. Returns ------- bool True when hand can still receive", "face-up with face_dir = 'up'. Any value of face_dir not spelling 'up' (case-insensitive)", "from the deck. If the 'face_dir' input argument requires the hand to be", "current round, following the settling of naturals; otherwise False (and the round continues).", "any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled between the dealer and a", "detects naturals and settles any bets as necessary; returns True if round is", "ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set = [ ace_values[1] + ace_sum_element for", "are special because bets can be made against these hands. \"\"\" def __init__(self,", "A hand object is a collection of cards associated with either the dealer", "and player's hands are compared. If the player wins, their player object is", "face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the hand's", "(> 21), returns None. Returns ------- best_value : int or None The best", "a card will be removed from this deck and added to the current", "dealer and a player unless both participants have 'stood' or gone bust.\" if", "For hands with any cards face-down: returns a list of strings. 
\"\"\" ace_count", "and adds this card to the hand with orientation defined by 'face_dir'. Parameters", "possible summed values for. ace_values : tuple A two-element tuple containing the possible", "gone bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score = 0 else: dealer_score =", "0 else: dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else: bet_amount =", "Hand class, initialising an empty hand object for the player. Parameters ---------- player_obj", "hand value exceeds 21; otherwise False. \"\"\" return self._bust def is_natural(self): \"\"\" As", "for this base hand class. \"\"\" self._live_hand = ( [] ) # A", "None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in", "of the card is determined by the number of cards currently in the", "card from the input deck and adds this card to the hand with", "non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 # This if-else block defines", "dealer has gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck", "player_obj): \"\"\" Method settles any bets at the end of the round; where", "No action, round ends and bet is collected (discarded) automatically with player's hand", "is returned. If it's a draw, the bet is returned to the player's", "as a sorted list. Parameters ---------- ace_count : int The number of ace", "not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between", "Returns ------- best_value : int or None The best possible total value of", "the hand is bust (value > 21) in the current round self._natural =", "bust (value > 21) in the current round self._natural = False # The", "the value of their bet. 
Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live'", "face-down, the freshly drawn card (face-up by default) calls its 'flip_card' method to", "with any cards face-down: returns a list of strings. \"\"\" ace_count = 0", "tuple containing the possible card values an ace can take e.g. (1, 11).", "hand_value_list def best_hand_value(self): \"\"\" Returns the best possible value of the hand as", "The outcome of each round is determined by the relative values of the", "deck object, the deck returns a single card object and deletes this card", "------- empty_string : str An empty string, returned so that the 'print_hand' method", "2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount *", "str Prints the hand's owner followed by shorthand details of all cards currently", "in the hand if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum =", "to 21 or is a natural. Updates hand status accordingly.\"\"\" natural_length = 2", "a player decides to 'stand' at their hand's current value, or if they", "card will be removed from this deck and added to the current hand", "hand. Parameters ---------- alt_text : str This optional argument will be printed instead", "bool Returns True if no further actions are possible in the current round,", "when hand can still receive cards in the current round; otherwise False. \"\"\"", "and methods of a hand object. A hand object is a collection of", "face-up. If the method is called with face_dir specified, it behaves identically to", ": blackjack.player.Player The player object that owns the input 'player_hand'. Where a payout", "currently within the hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len() to be", "cards in the current round; otherwise False. 
\"\"\" return self._active def is_bust(self): \"\"\"", "bet by a player against this hand: initially zero player_name = player_obj.get_name() super().__init__(player_name)", "'print_hand' method : str Prints the hand's owner followed by shorthand details of", "face_dir specified, it behaves identically to the equivalent method on the base Hand", "face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method", "'natural' if it contains two cards with a total value of 21. Players", "will be removed from this deck and added to the current hand object.", "a natural (value = 21 with 2 cards) self._holder_name = holder_name def __iter__(self):", "class PlayerHand(Hand): \"\"\" A subclass defining the properties and methods specific to a", "print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self):", "all cards face-up: returns a list of integers. For hands with any cards", "A string that communicates the players score. As the dealer's hand is resolved,", "card will be removed from this deck and added to the dealer's hand", "\"\"\" for card in self._live_hand: yield card def __repr__(self): \"\"\" Entering the reference", "for card in self._live_hand: yield card def __repr__(self): \"\"\" Entering the reference for", "+= card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 # This if-else block defines a", "------- bool True when hand can still receive cards in the current round;", "hand is read and compared to the status of the dealer's hand. Where", "hands. \"\"\" def __init__(self, player_obj): \"\"\" Calls the __init__ method of the base", "returns the active status of the hand in the current round (bust/stand =", "(each having their own respective subclasses with specialised methods and attributes). Within a", "these hands. 
\"\"\" def __init__(self, player_obj): \"\"\" Calls the __init__ method of the", "score is printed each time the dealer's hand is printed so the user", "def print_hand(self, alt_text=None): \"\"\" Prints the hand's owner followed by shorthand details of", "possible values of a collection of ace cards as a sorted list. Parameters", "\"\"\" import time draw_delay = 1 # The pause in seconds between drawn", "of the hand's constituent cards. If no hand value <= 21, 'best_value' =", "to the screen.\"\"\" print(\"\\n---------------\") for card in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer", "returns a list of integers. For hands with any cards face-down: returns a", "If hand value is bust (> 21), returns None. Returns ------- best_value :", "this deck and added to the current hand object. face_dir : str Defines", "blackjack.deck.Deck The game's 'live' deck object - cards may be removed from this", "hands are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object.", "= 21 # Ideal score value for both players class Hand: \"\"\" A", "between the dealer and a player unless both participants have 'stood' or gone", "score. As the dealer's hand is resolved, the players score is printed each", "of the dealer's and player's hands are compared. If the player wins, their", "= [0] for ace_idx in range(ace_count): first_set = [ ace_values[0] + ace_sum_element for", "Raised when the hand is inactive (can't accept further cards). \"\"\" assert (", "{self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand is bust, has value", "return self._natural def stand(self): \"\"\"Updates hand status to inactive: triggered when player chooses", "player chooses to draw no more cards in the current round.\"\"\" self._active =", "be printed instead of the hand owner's name if provided. Returns ------- empty_string", "attributes). 
Within a round of blackjack, cards are added to a hand when", "hand object. player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. Allows the player's", "of all cards currently within the hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows", "the round continues). \"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete", "player_hand, player_score_message): \"\"\" This method automatically resolves the dealer's hand: drawing cards until", "Defaults to 'Player' for this base hand class. \"\"\" self._live_hand = ( []", "inactive: triggered when player chooses to draw no more cards in the current", "the base Hand class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1:", "self.stand() if len(self) == natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\"", "This if-else block defines a list of possible values associated with all face-up", "or goes bust. The dealer's final hand score is printed to the screen", "method on the base Hand class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self)", "'live' hand object. The value of this hand is read and compared to", "game flow, this bet amount has already been verified as positive and has", "if ( self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one and alt_text is not", "object - cards may be removed from this deck and added to the", "* payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method settles any bets", "a player. Players' hands are special because bets can be made against these", "Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. 
The 'natural' status", "An attribute holding the amount bet by a player against this hand: initially", "+ ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set = [ ace_values[1] + ace_sum_element", "value of the dealer's and player's hands are compared. If the player wins,", "Loop: counts number of face-down cards in the hand; counts face-up aces; sums", "hand: drawing cards until the hand value exceeds seventeen. Method initially checks the", "hand in the current round (bust/stand = False; otherwise = True). A hand", "the card is dealt face-down; otherwise face-up. If the method is called with", "in self: # Try statement catches AssertionErrors thrown when 'is_ace' method encounters a", "method is called with face_dir specified, it behaves identically to the equivalent method", "amount def get_bet(self): \"\"\"Returns the amount bet against this player's hand as a", "(bust/stand = False; otherwise = True). A hand is regarded as active in", "the hand array. Finally, the method calls '_validate_hand_status' that checks whether the hand", "amount has already been verified as positive and has already been removed from", "concluded, otherwise False. A hand is a 'natural' if it contains two cards", "card contains two cards with combined value of 21; otherwise False. \"\"\" return", "] second_set = [ ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities", "def settle_bet(self, player_hand, player_obj): \"\"\" Method settles any bets at the end of", "class defining the properties and methods of a hand object. A hand object", "score value for both players class Hand: \"\"\" A class defining the properties", "balance is uneffected. The bet placed against their hand is lost when a", "the hand object bseing created: either 'Player' or 'Dealer'. 
Defaults to 'Player' for", "1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def", "object and deletes this card from the deck. If the 'face_dir' input argument", "card from the deck. If the 'face_dir' input argument requires the hand to", "the hands '_active' attribute is set to False signalling that no further actions", "\"\"\" return self._natural def stand(self): \"\"\"Updates hand status to inactive: triggered when player", "print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one and", "= 21: returns True; otherwise False). Returns ------- bool True when card contains", "The next card in the hand (within the hand object's '_live_hand' attribute). \"\"\"", "= deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\"", "# Player wins 1.5x their original bet; multiplier is 2.5x so bet amount", "properties and methods specific to a hand object held by the dealer. The", "this hand: it is marked as inactive in the current round.\" drawn_card =", "possible total value of the hand's constituent cards. If no hand value <=", "owner followed by shorthand details of all cards currently within the hand. Parameters", "or None The best possible total value of the hand's constituent cards. If", "the dealer's hand object. face_dir : None / str Defines whether card is", "against these hands. \"\"\" def __init__(self, player_obj): \"\"\" Calls the __init__ method of", "associated with all face-up cards in the hand if ace_count > 0: ace_sum_possibilities", "ace_values = None face_down_count = 0 non_ace_sum = 0 # Loop: counts number", "as necessary; returns True if round is concluded, otherwise False. A hand is", "card is added to the hand face-up or face-down. 
By default, the card", "hand; counts face-up aces; sums face-up cards that aren't an ace for card", "argument requires the hand to be dealt face-down, the freshly drawn card (face-up", "deck_obj : blackjack.deck.Deck The game's 'live' deck object - a card will be", "called against a dealer's hand object. Where None, the orientation of the card", "True when card contains two cards with combined value of 21; otherwise False.", "prints hand details to the screen.\"\"\" print(\"\\n---------------\") for card in self: if not", "each time the dealer's hand is printed so the user can easily compare", "and methods specific to a hand object held by a player. Players' hands", "The value of this hand is read and compared to the value of", "the dealer's hand. Where a payout is required, the amount bet against the", "player against this hand: initially zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount):", "set to False signalling that no further actions are required by the player", "of a round, the dealer resolves this bet. 
Parameters ---------- amount : float", "face_dir=None): \"\"\" Removes one card from the input deck and adds this card", "a card will be removed from this deck and added to the dealer's", "def __init__(self): \"\"\"Calls the __init__ method of the base Hand class, initialising an", "not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled between the dealer and", "value of this hand is read and compared to the value of the", "game's 'live' deck object - cards may be removed from this deck and", "\"\"\" Returns the total value(s) of the target hand by summing the values", "drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the hand's owner followed by", "to allow any number of possible ace values (additional loop over keys of", "both participants have 'stood' or gone bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score", "but they receive no winnings. If the player loses, the method exits and", "total value of 21. Players and dealers can get naturals upon drawing their", "by the dealer. The dealer's hand is unique because: the first card dealt", "payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand):", "A hand is regarded as active in a round while cards can still", "communicates the players score. As the dealer's hand is resolved, the players score", "'natural' status of this hand is read and compared to the status of", "by a player. 
Players' hands are special because bets can be made against", "print(\"\\n---------------\") for card in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\")", "is set to False signalling that no further actions are required by the", "bet placed against their hand is lost when a new round starts and", "super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a bet made by a player to", "the current round self._natural = False # The natural status communicates whether the", "if self.best_hand_value() is None: self._bust = True self.stand() elif self.best_hand_value() == twenty_one: self.stand()", "def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values of a collection of ace", "of 'print_hand' method : str Prints the hand's owner followed by shorthand details", "is printed each time the dealer's hand is printed so the user can", "objects to be iterated over, yielding constituent card objects in the order they", "if no further actions are possible in the current round, following the settling", "is_active(self): \"\"\" As a boolean, returns the active status of the hand in", "as the dealer's hand is resolved. player_score_message : str A string that communicates", "a draw, the bet is returned to the player's balance but they receive", "dealer. The dealer's hand is unique because: the first card dealt to the", "of strings. \"\"\" ace_count = 0 ace_values = None face_down_count = 0 non_ace_sum", "= False return round_complete else: round_complete = True bet_amount = player_hand.get_bet() if self.is_natural()", "0 ) # An attribute holding the amount bet by a player against", "from this deck and added to the dealer's hand object. 
player_hand : blackjack.hand.PlayerHand", "hand: it is marked as inactive in the current round.\" drawn_card = deck_obj.deal_card()", "hand_value_list = [ str(value) + \" + *-*\" * face_down_count for value in", "'stand' at their hand's current value, or if they go bust (> 21),", "'_holder_name' attribute on the base class. This name is then displayed when printing", "draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card from the input deck and adds", "removed from this deck and added to the current hand object. face_dir :", "combined value of 21; otherwise False. \"\"\" return self._natural def stand(self): \"\"\"Updates hand", "object's '_live_hand' attribute). \"\"\" for card in self._live_hand: yield card def __repr__(self): \"\"\"", "dealer's hand is printed so the user can easily compare the relative scores.", "two cards with a total value of 21. Players and dealers can get", "hand as an integer. If hand value is bust (> 21), returns None.", "base hand class. \"\"\" self._live_hand = ( [] ) # A list of", "resolved. player_score_message : str A string that communicates the players score. As the", "integer. If hand value is bust (> 21), returns None. Returns ------- best_value", "is uneffected. The bet placed against their hand is lost when a new", "dealer's and player's hands are compared. If the player wins, their player object", "hand object is a collection of cards associated with either the dealer or", "a sorted list. Parameters ---------- ace_count : int The number of ace cards", "and not player_hand.is_natural(): # No action, round ends and bet is collected (discarded)", "and methods specific to a hand object held by the dealer. The dealer's", "a natural and the dealer did not, they are immediately paid 1.5x the", "is still active in the current round self._bust = False # The bust", "of a collection of ace cards as a sorted list. 
Parameters ---------- ace_count", "ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text is not None: print(alt_text) elif ends_with_s:", "TODO: Refactor to allow any number of possible ace values (additional loop over", "hand object held by a player. Players' hands are special because bets can", "bet against the hand object. In typical game flow, this bet amount has", "\"\"\" A class defining the properties and methods of a hand object. A", "the properties and methods of a hand object. A hand object is a", "object is a collection of cards associated with either the dealer or a", "'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s) of the", "first_set = [ ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set =", "Returns ------- bool True when hand can still receive cards in the current", "hand object's '_live_hand' attribute). \"\"\" for card in self._live_hand: yield card def __repr__(self):", "True). A hand is regarded as active in a round while cards can", "accordingly. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - a", "return self._bust def is_natural(self): \"\"\" As a boolean, returns 'natural' status of hand", "method of the base Hand class, initialising an empty hand object for the", "default) calls its 'flip_card' method to ensure the card is correctly face-down before", "their hand is lost when a new round starts and new hands are", "a hand object in the terminal triggers this method, printing all hand details.", "dealer's hand is resolved. player_score_message : str A string that communicates the players", "= 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount", "21: returns True; otherwise False). 
Returns ------- bool True when lowest possible hand", "'up' (case-insensitive) will add the card face-down. Raises ------ AssertionError Raised when the", "\"Cannot draw a card to this hand: it is marked as inactive in", "'print_hand' method can be called by the Hand class' __repr__ method which must", "the card will be added face-up with face_dir = 'up'. Any value of", "current value, or if they go bust (> 21), the hands '_active' attribute", "boolean, returns 'natural' status of hand (2 cards in hand and value =", "object. The value of this hand is read and compared to the value", "printed instead of the hand owner's name if provided. Returns ------- empty_string :", "player wins, their player object is payed the value of their bet plus", "ace cards to calculate possible summed values for. ace_values : tuple A two-element", "dealt to the dealer will always be dealt face-down; the dealer's turn in", "with either the dealer or a player (each having their own respective subclasses", "the player holding the hand in the current round. Returns ------- bool True", ": str Defines whether card is added to the hand face-up or face-down.", "payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining the properties and methods specific to", "print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in the hand face-up and prints", "deck object - a card will be removed from this deck and added", "a total value of 21. Players and dealers can get naturals upon drawing", "exits and their bet is lost. The value of the dealer's and player's", "in the current round self._bust = False # The bust status communicates whether", "read into 'bet_amount'. 
player_obj : blackjack.player.Player The player object that owns the input", "wins, their player object is payed the value of their bet plus the", "try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down)", "\"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand object for a given", "drawing their first two cards at the start of a round. If the", "), \"Bets cannot be settled between the dealer and a player unless both", ": str Defines the owner, or 'holder', of the hand object bseing created:", "the orientation of the card is determined by the number of cards currently", "a payout is required, this player's balance will be updated accordingly. Returns -------", "upon drawing their first two cards at the start of a round. If", "= True bet_amount = player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): # No action,", "defining the properties and methods of a hand object. A hand object is", "deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\") payout_multiplier =", "= player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif", "len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj,", "\"\"\"Allows len() to be used on hand objects, returning the number of cards", "bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in the hand face-up", "details. Returns ------- Output of 'print_hand' method : str Prints the hand's owner", "face-down cards in calculating the value(s) of the hand. Defaults to False. 
Returns", "cards, this block adds the consistent face-down string to the face-up values if", "hand's owner followed by shorthand details of all cards currently within the hand.", "in calculating the value(s) of the hand. Defaults to False. Returns ------- hand_value_list", "= [ ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set = [", "round; otherwise False. \"\"\" return self._active def is_bust(self): \"\"\" As a boolean, returns", "settled between the dealer and a player unless both participants have 'stood' or", "[ str(value) + \" + *-*\" * face_down_count for value in hand_value_list ]", "= \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method automatically", "'face_dir'. Calls the 'deal_card' method of an input deck object, the deck returns", "associated player chooses to 'hit'. The outcome of each round is determined by", "value in hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\" Returns the best possible", "owner's name if provided. 
Returns ------- empty_string : str An empty string, returned", "player_hand.is_active()) ), \"Bets cannot be settled between the dealer and a player unless", "been verified as positive and has already been removed from the player's balance.", "balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount *", "face_down_count += 1 # This if-else block defines a list of possible values", "returned so that the 'print_hand' method can be called by the Hand class'", "Once a player decides to 'stand' at their hand's current value, or if", "natural, the round is over and they collect the bet of any player", "\"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj,", "holder_name def __iter__(self): \"\"\" Allows hand objects to be iterated over, yielding constituent", "'up'. Any value of face_dir not spelling 'up' (case-insensitive) will add the card", "are immediately paid 1.5x the value of their bet. Parameters ---------- player_hand :", "requires the hand to be dealt face-down, the freshly drawn card (face-up by", "return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s) of the target", "goes bust. The dealer's final hand score is printed to the screen or", "made by a player to the current hand object: at the end of", "cards face-up: returns a list of integers. For hands with any cards face-down:", "inactive in the current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card()", "in the current round self._natural = False # The natural status communicates whether", "time the dealer's hand is printed so the user can easily compare the", "hand is unique because: the first card dealt to the dealer will always", "of hand in the current round (value > 21: returns True; otherwise False).", "face-up or face-down. 
By default, 'face_dir' is None when method is called against", "= 0 # Loop: counts number of face-down cards in the hand; counts", "face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self,", "required, this player's balance will be updated accordingly. Returns ------- round_complete : bool", "the player's hand to be printed for comparison as the dealer's hand is", "bet plus the original bet amount is returned. If it's a draw, the", "Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - a card", "to ensure the card is correctly face-down before it it is appended to", "a natural (dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(),", "card to the hand with orientation defined by 'face_dir'. Calls the 'deal_card' method", "hands are compared. If the player wins, their player object is payed the", "by shorthand details of all cards currently within the hand. \"\"\" return self.print_hand()", "hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals and settles", "when 'is_ace' method encounters a face-down card try: if card.is_ace(bypass_face_down): ace_count += 1", "value exceeds 17 or goes bust. The dealer's final hand score is printed", "in the current round.\"\"\" self._active = False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes", "a list of integers. For hands with any cards face-down: returns a list", "round of blackjack, cards are added to a hand when the associated player", "wins 1.5x their original bet; multiplier is 2.5x so bet amount is also", "PlayerHand(Hand): \"\"\" A subclass defining the properties and methods specific to a hand", "hand with orientation defined by 'face_dir'. 
Calls the 'deal_card' method of an input", "\"\"\" This module exports the 'Hand' class, 'PlayerHand' and 'DealerHand' subclasses, and related", "or if they go bust (> 21), the hands '_active' attribute is set", "up the hand; initialised as an empty list self._active = True # The", "deck and adds this card to the hand with orientation defined by 'face_dir'.", "compared to the value of the dealer's hand. Where a payout is required,", "dealer's hand object. face_dir : None / str Defines whether card is added", "reference for a hand object in the terminal triggers this method, printing all", "score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand()", "# Loop: counts number of face-down cards in the hand; counts face-up aces;", "the dealer and a player unless both participants have 'stood' or gone bust.\"", "name if provided. Returns ------- empty_string : str An empty string, returned so", "player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier)", "whether the hand is still active in the current round self._bust = False", "dealer resolves this bet. Parameters ---------- amount : float The amount bet against", "str Defines whether card is added to the hand face-up or face-down. By", "_calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values of a collection of ace cards", "elif len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\"", "A list containing all possible values the hand's combination of cards can take", "blackjack.hand.PlayerHand A player's 'live' hand object. 
The value of this hand is read", "marked as inactive in the current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() !=", "hand to be printed for comparison as the dealer's hand is resolved. player_score_message", "ace_values): \"\"\" Returns the possible values of a collection of ace cards as", "the bet of any player who did not also get a natural. If", "statement catches AssertionErrors thrown when 'is_ace' method encounters a face-down card try: if", "hand object held by the dealer. The dealer's hand is unique because: the", "value > 17, the dealer stands. If < 17, the hand draws cards", "amount): \"\"\" Adds a bet made by a player to the current hand", "object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card", "an ace for card in self: # Try statement catches AssertionErrors thrown when", "player decides to 'stand' at their hand's current value, or if they go", "current round self._bust = False # The bust status communicates whether the hand", "'live' deck object - cards may be removed from this deck and added", "is marked as inactive in the current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower()", "Players and dealers can get naturals upon drawing their first two cards at", "round is concluded, otherwise False. 
A hand is a 'natural' if it contains", "The bet placed against their hand is lost when a new round starts", "The natural status communicates whether the hand is a natural (value = 21", "for idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust()", "= 21 with 2 cards) self._holder_name = holder_name def __iter__(self): \"\"\" Allows hand", "self._reveal_hand() print(\"Dealer has a natural!\") elif not self.is_natural() and player_hand.is_natural(): # Player wins", "self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values of", "status of this hand is read and compared to the status of the", "value of 21; otherwise False. \"\"\" return self._natural def stand(self): \"\"\"Updates hand status", "details to the screen.\"\"\" print(\"\\n---------------\") for card in self: if not card.is_face_up(): card.flip_card()", "value of their bet. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand", "'best_value' = None. \"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value =", "def _reveal_hand(self): \"\"\"Turns all cards in the hand face-up and prints hand details", "'live' deck object - a card will be removed from this deck and", "must return a string-like object. 
\"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower() ==", "status communicates whether the hand is bust (value > 21) in the current", "typical game flow, this bet amount has already been verified as positive and", "True if no further actions are possible in the current round, following the", "the hand if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [", "the hand; initialised as an empty list self._active = True # The active", "objects, returning the number of cards in the hand as the object 'length'.\"\"\"", "naturals; otherwise False (and the round continues). \"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete", "\" + *-*\" * face_down_count for value in hand_value_list ] return hand_value_list def", "A player's 'live' hand object. Allows the player's hand to be printed for", "of aces can combine to make. TODO: Refactor to allow any number of", "methods of a hand object. A hand object is a collection of cards", "\"\"\" self._bet = float( 0 ) # An attribute holding the amount bet", "in a single round must be resolved automatically. \"\"\" def __init__(self): \"\"\"Calls the", "the hand draws cards until its value exceeds 17 or goes bust. The", "the dealer's hand object. player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. Allows", "resolved, the players score is printed each time the dealer's hand is printed", "a dealer's hand object. Where None, the orientation of the card is determined", "cards associated with either the dealer or a player (each having their own", "automatically resolves the dealer's hand: drawing cards until the hand value exceeds seventeen.", "so the user can easily compare the relative scores. \"\"\" dealer_target = 17", "player_hand, player_obj): \"\"\" Method detects naturals and settles any bets as necessary; returns", "card objects in the order they were added. 
Yields ------ card : blackjack.card.Card", "For a hand with all cards face-up: returns a list of integers. For", "= False # The natural status communicates whether the hand is a natural", "their bet plus the original bet amount is returned. If it's a draw,", "draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card from the input deck and adds", "holding the amount bet by a player against this hand: initially zero player_name", "and compared to the status of the dealer's hand. Where a payout is", "\"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay)", "a hand with all cards face-up: returns a list of integers. For hands", "hand being initialised. The name of this player is queried and set used", "hand. Defaults to False. Returns ------- hand_value_list : list of int / str", "related methods. \"\"\" import time draw_delay = 1 # The pause in seconds", "seventeen. Method initially checks the dealer's hand value: if its best value >", "If a player gets a natural and the dealer did not, they are", "to this hand: it is marked as inactive in the current round.\" drawn_card", "'ace_count' number of aces can combine to make. TODO: Refactor to allow any", "face-up or face-down. By default, the card will be added face-up with face_dir", "otherwise = True). A hand is regarded as active in a round while", "Hand: \"\"\" A class defining the properties and methods of a hand object.", "def get_bet(self): \"\"\"Returns the amount bet against this player's hand as a float.\"\"\"", "hand object for a given participant. 
Parameters ---------- holder_name : str Defines the", "# The natural status communicates whether the hand is a natural (value =", "cards in the current round.\"\"\" self._active = False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\"", "string to the face-up values if face_down_count > 0: hand_value_list = [ str(value)", "either the dealer or a player (each having their own respective subclasses with", "= 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while", "added to the dealer's hand object. player_hand : blackjack.hand.PlayerHand A player's 'live' hand", "this player is queried and set used to define the '_holder_name' attribute on", "own respective subclasses with specialised methods and attributes). Within a round of blackjack,", "settle_bet(self, player_hand, player_obj): \"\"\" Method settles any bets at the end of the", "21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand()", "returns None. Returns ------- best_value : int or None The best possible total", "hand object. Allows the player's hand to be printed for comparison as the", "the current round.\"\"\" self._active = False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one", "while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay)", "False signalling that no further actions are required by the player holding the", "with combined value of 21; otherwise False. 
\"\"\" return self._natural def stand(self): \"\"\"Updates", "is resolved, the players score is printed each time the dealer's hand is", "21) in the current round self._natural = False # The natural status communicates", "at the end of the round; where the player loses, the method exits", "def __len__(self): \"\"\"Allows len() to be used on hand objects, returning the number", "will be printed instead of the hand owner's name if provided. Returns -------", "is also read into 'bet_amount'. player_obj : blackjack.player.Player The player object that owns", "the hand face-up and prints hand details to the screen.\"\"\" print(\"\\n---------------\") for card", "\"\"\" As a boolean, returns 'bust' status of hand in the current round", "twenty_one: self.stand() if len(self) == natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values):", "has a single card in their hand, the card is dealt face-down; otherwise", "dealer currently has a single card in their hand, the card is dealt", "value, or if they go bust (> 21), the hands '_active' attribute is", "the hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len() to be used on", "at their hand's current value, or if they go bust (> 21), the", "to a hand object held by a player. Players' hands are special because", "game's 'live' deck object - a card will be removed from this deck", "> 0: hand_value_list = [ str(value) + \" + *-*\" * face_down_count for", "base class. This name is then displayed when printing hand details to screen.", "hand when the associated player chooses to 'hit'. The outcome of each round", "A list of card objects making up the hand; initialised as an empty", "if face_down_count > 0: hand_value_list = [ str(value) + \" + *-*\" *", "def __repr__(self): \"\"\" Entering the reference for a hand object in the terminal", "1.5x their original bet; multiplier is 2.5x so bet amount is also deposited", "to make. 
TODO: Refactor to allow any number of possible ace values (additional", "to the dealer's hand object. face_dir : None / str Defines whether card", "on the base class. This name is then displayed when printing hand details", "spelling 'up' (case-insensitive) will add the card face-down. Raises ------ AssertionError Raised when", "print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message)", "the end of the round; where the player loses, the method exits and", "> 17, the dealer stands. If < 17, the hand draws cards until", "of ace cards as a sorted list. Parameters ---------- ace_count : int The", "with no duplicates. For a hand with all cards face-up: returns a list", "------- round_complete : bool Returns True if no further actions are possible in", "if card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except", "elif not self.is_natural() and player_hand.is_natural(): # Player wins 1.5x their original bet; multiplier", "for card in self: # Try statement catches AssertionErrors thrown when 'is_ace' method", "class' __repr__ method which must return a string-like object. \"\"\" empty_string = \"\"", "their original bet; multiplier is 2.5x so bet amount is also deposited back", "alt_text : str This optional argument will be printed instead of the hand", "self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score", "---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. 
The 'natural' status of", "of an input deck object, the deck returns a single card object and", "def __init__(self, player_obj): \"\"\" Calls the __init__ method of the base Hand class,", "holder_name : str Defines the owner, or 'holder', of the hand object bseing", "(self.best_hand_value() == twenty_one and alt_text is not None) ): print(f\"Value: {self.hand_value()}\") return empty_string", "__iter__(self): \"\"\" Allows hand objects to be iterated over, yielding constituent card objects", "the active status of the hand in the current round (bust/stand = False;", "'stood' or gone bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score = 0 else:", "printed for comparison as the dealer's hand is resolved. player_score_message : str A", "= 0 non_ace_sum = 0 # Loop: counts number of face-down cards in", "otherwise False). Returns ------- bool True when card contains two cards with combined", "return self.print_hand() def __len__(self): \"\"\"Allows len() to be used on hand objects, returning", "is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx,", "communicates whether the hand is still active in the current round self._bust =", "catches AssertionErrors thrown when 'is_ace' method encounters a face-down card try: if card.is_ace(bypass_face_down):", "of possible values associated with all face-up cards in the hand if ace_count", "player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining the properties and methods", "all hand details. Returns ------- Output of 'print_hand' method : str Prints the", "= self.hand_value(bypass_face_down=True) try: best_value = max([val for val in all_hand_values if val <=", "Removes one card from the input deck and adds this card to the", "positive and has already been removed from the player's balance. 
\"\"\" self._bet +=", "# The active status communicates whether the hand is still active in the", "is None: self._bust = True self.stand() elif self.best_hand_value() == twenty_one: self.stand() if len(self)", "a hand object held by a player. Players' hands are special because bets", "---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - cards may be", "dealer's hand is resolved, the players score is printed each time the dealer's", "amount is returned. If it's a draw, the bet is returned to the", "list of possible values associated with all face-up cards in the hand if", "face-up cards in the hand if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values)", "either 'Player' or 'Dealer'. Defaults to 'Player' for this base hand class. \"\"\"", "round; where the player loses, the method exits and their bet is lost.", "payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1", "player unless both participants have 'stood' or gone bust.\" if player_hand.is_bust(): return if", "card is dealt face-down; otherwise face-up. If the method is called with face_dir", "attribute on the base class. This name is then displayed when printing hand", "value of face_dir not spelling 'up' (case-insensitive) will add the card face-down. Raises", "class, initialising an empty hand object for the player. Parameters ---------- player_obj :", "float( 0 ) # An attribute holding the amount bet by a player", "or gone bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score = 0 else: dealer_score", "the hand owner's name if provided. Returns ------- empty_string : str An empty", "value is bust (> 21), returns None. Returns ------- best_value : int or", "resolves the dealer's hand: drawing cards until the hand value exceeds seventeen. Method", "does the dealer! 
It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return", "as active in a round while cards can still be added to the", "Hand class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir =", "to the hand. Once a player decides to 'stand' at their hand's current", "their hand's current value, or if they go bust (> 21), the hands", "hand object. face_dir : str Defines whether card is added to the hand", "values an ace can take e.g. (1, 11). Returns ------- ace_sum_possibilities : list", "= list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining", "bets can be made against these hands. \"\"\" def __init__(self, player_obj): \"\"\" Calls", "- a card will be removed from this deck and added to the", "self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand,", "bust. The dealer's final hand score is printed to the screen or the", "self._live_hand: yield card def __repr__(self): \"\"\" Entering the reference for a hand object", "returned. If it's a draw, the bet is returned to the player's balance", "updated accordingly. 
Returns ------- round_complete : bool Returns True if no further actions", "* payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining the properties and methods specific", "cards in the hand as the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False):", "object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s) of", "Parameters ---------- alt_text : str This optional argument will be printed instead of", "17, the hand draws cards until its value exceeds 17 or goes bust.", "hand by summing the values of all constituent card objects. Parameters ---------- bypass_face_down", "owner followed by shorthand details of all cards currently within the hand. \"\"\"", "compared. If the player wins, their player object is payed the value of", "the round; where the player loses, the method exits and their bet is", "{single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one and alt_text is", "string, returned so that the 'print_hand' method can be called by the Hand", "deck and added to the dealer's hand object. face_dir : None / str", "False). Returns ------- bool True when card contains two cards with combined value", "value: if its best value > 17, the dealer stands. If < 17,", "the possible card values an ace can take e.g. (1, 11). Returns -------", "over, yielding constituent card objects in the order they were added. Yields ------", "------- bool True when lowest possible hand value exceeds 21; otherwise False. \"\"\"", "face_down_count > 0: hand_value_list = [ str(value) + \" + *-*\" * face_down_count", "not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card", "player. 
Parameters ---------- player_obj : blackjack.player.Player The player object that owns the hand", "21, 'best_value' = None. \"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value", "card values an ace can take e.g. (1, 11). Returns ------- ace_sum_possibilities :", "a single card in their hand, the card is dealt face-down; otherwise face-up.", "new round starts and new hands are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand", "the order they were added. Yields ------ card : blackjack.card.Card The next card", "string-like object. \"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text", "iterated over, yielding constituent card objects in the order they were added. Yields", "'face_dir' input argument requires the hand to be dealt face-down, the freshly drawn", "is called against a dealer's hand object. Where None, the orientation of the", "= max([val for val in all_hand_values if val <= max_best_value]) except ValueError: best_value", "= True). A hand is regarded as active in a round while cards", "player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The 'natural' status of this", "print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\")", "updates all hand statuses accordingly. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live'", "is required, this player's balance will be updated accordingly. Returns ------- round_complete :", "if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() ==", "hand value is bust (> 21), returns None. 
Returns ------- best_value : int", "if self.is_bust(): dealer_score = 0 else: dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value():", "If the 'face_dir' input argument requires the hand to be dealt face-down, the", "did not also get a natural. If a player gets a natural and", "into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's a", "player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The value of this hand", "empty hand object for a given participant. Parameters ---------- holder_name : str Defines", "player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def", "< 17, the hand draws cards until its value exceeds 17 or goes", "\"\"\" Prints the hand's owner followed by shorthand details of all cards currently", "hand (within the hand object's '_live_hand' attribute). \"\"\" for card in self._live_hand: yield", "hand array. Finally, the method calls '_validate_hand_status' that checks whether the hand is", "_reveal_hand(self): \"\"\"Turns all cards in the hand face-up and prints hand details to", "naturals and settles any bets as necessary; returns True if round is concluded,", "payout is required, this player's balance will be updated accordingly. 
\"\"\" assert not", "best_value = None return best_value def is_active(self): \"\"\" As a boolean, returns the", "twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj)", "end of the round; where the player loses, the method exits and their", "+ second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining the properties", "is unique because: the first card dealt to the dealer will always be", "the hand face-up or face-down. By default, the card will be added face-up", "by default) calls its 'flip_card' method to ensure the card is correctly face-down", "if-else block defines a list of possible values associated with all face-up cards", "into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount", "exceeds 17 or goes bust. The dealer's final hand score is printed to", "bust, has value equal to 21 or is a natural. Updates hand status", "hand is regarded as active in a round while cards can still be", "21 with 2 cards) self._holder_name = holder_name def __iter__(self): \"\"\" Allows hand objects", "the dealer did not, they are immediately paid 1.5x the value of their", "2 if self.best_hand_value() is None: self._bust = True self.stand() elif self.best_hand_value() == twenty_one:", "and compared to the value of the dealer's hand. Where a payout is", "chooses to 'hit'. The outcome of each round is determined by the relative", "self.is_natural() and not player_hand.is_natural(): # No action, round ends and bet is collected", "action, round ends and bet is collected (discarded) automatically with player's hand self._reveal_hand()", "a natural. 
Updates hand status accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is None:", "is a 'natural' if it contains two cards with a total value of", "in their hand, the card is dealt face-down; otherwise face-up. If the method", "natural_length = 2 if self.best_hand_value() is None: self._bust = True self.stand() elif self.best_hand_value()", "hand and value = 21: returns True; otherwise False). Returns ------- bool True", "Where a payout is required, this player's balance will be updated accordingly. \"\"\"", "float The amount bet against the hand object. In typical game flow, this", "is queried and set used to define the '_holder_name' attribute on the base", "if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value()", "values for. ace_values : tuple A two-element tuple containing the possible card values", "super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method automatically resolves the", "is 2.5x so bet amount is also deposited back into balance print(f\"\\n{player_obj.get_name()} has", "\"\"\" self._bet += amount def get_bet(self): \"\"\"Returns the amount bet against this player's", "take with no duplicates. For a hand with all cards face-up: returns a", "its 'flip_card' method to ensure the card is correctly face-down before it it", "hand in the current round (value > 21: returns True; otherwise False). Returns", "of int A list containing each value 'ace_count' number of aces can combine", "hand as the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the", "collection of ace cards as a sorted list. 
Parameters ---------- ace_count : int", "\"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len() to be used on hand objects,", "payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj): \"\"\"", "owner, or 'holder', of the hand object bseing created: either 'Player' or 'Dealer'.", "against a dealer's hand object. Where None, the orientation of the card is", "called by the Hand class' __repr__ method which must return a string-like object.", "they go bust (> 21), the hands '_active' attribute is set to False", "relative scores. \"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got", "yield card def __repr__(self): \"\"\" Entering the reference for a hand object in", "else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if (", "hand object bseing created: either 'Player' or 'Dealer'. Defaults to 'Player' for this", "of this hand is read and compared to the value of the dealer's", "all face-up cards in the hand if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count,", "and value = 21: returns True; otherwise False). Returns ------- bool True when", "is now bust and updates all hand statuses accordingly. Parameters ---------- deck_obj :", "for ace_idx in range(ace_count): first_set = [ ace_values[0] + ace_sum_element for ace_sum_element in", "blackjack.player.Player The player object that owns the input 'player_hand'. Where a payout is", "value of the hand's constituent cards. If no hand value <= 21, 'best_value'", "By default, the card will be added face-up with face_dir = 'up'. 
Any", "the number of cards in the hand as the object 'length'.\"\"\" return len(self._live_hand)", "if dealer_score > player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score:", "second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining the properties and", "non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list = [non_ace_sum]", "queried and set used to define the '_holder_name' attribute on the base class.", "the user can easily compare the relative scores. \"\"\" dealer_target = 17 print(player_score_message)", "object held by a player. Players' hands are special because bets can be", "specific to a hand object held by a player. Players' hands are special", "player_obj : blackjack.player.Player The player object that owns the hand being initialised. The", "otherwise False). Returns ------- bool True when lowest possible hand value exceeds 21;", ") # A list of card objects making up the hand; initialised as", "at the start of a round. If the dealer gets a natural, the", "in a round while cards can still be added to the hand. Once", "+= 1 # This if-else block defines a list of possible values associated", "natural (value = 21 with 2 cards) self._holder_name = holder_name def __iter__(self): \"\"\"", "player. Players' hands are special because bets can be made against these hands.", "the amount bet against the hand is also read into 'bet_amount'. player_obj :", "on hand objects, returning the number of cards in the hand as the", "__init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand object for a given participant. Parameters", "return if self.is_bust(): dealer_score = 0 else: dealer_score = self.best_hand_value() if dealer_score >", "hand object. 
A hand object is a collection of cards associated with either", "or 'holder', of the hand object bseing created: either 'Player' or 'Dealer'. Defaults", "+= amount def get_bet(self): \"\"\"Returns the amount bet against this player's hand as", "hand if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility", "adds the consistent face-down string to the face-up values if face_down_count > 0:", "player's balance. \"\"\" self._bet += amount def get_bet(self): \"\"\"Returns the amount bet against", "always be dealt face-down; the dealer's turn in a single round must be", "on the base Hand class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) ==", "dealer's hand value: if its best value > 17, the dealer stands. If", "self.best_hand_value() is None: self._bust = True self.stand() elif self.best_hand_value() == twenty_one: self.stand() if", "hand is resolved. player_score_message : str A string that communicates the players score.", "round self._natural = False # The natural status communicates whether the hand is", "all cards currently within the hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len()", "with all face-up cards in the hand if ace_count > 0: ace_sum_possibilities =", "and player_hand.is_natural(): # Player wins 1.5x their original bet; multiplier is 2.5x so", "its value exceeds 17 or goes bust. The dealer's final hand score is", "Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The value of", "dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand()", "\"\"\" Method detects naturals and settles any bets as necessary; returns True if", "necessary; returns True if round is concluded, otherwise False. 
A hand is a", "The dealer's final hand score is printed to the screen or the player", "holder_name=\"Player\"): \"\"\" Initialises an empty hand object for a given participant. Parameters ----------", "bypass_face_down : bool Tells method whether to include face-down cards in calculating the", "an input deck object, the deck returns a single card object and deletes", "this block adds the consistent face-down string to the face-up values if face_down_count", "is None when method is called against a dealer's hand object. Where None,", "player_obj : blackjack.player.Player The player object that owns the input 'player_hand'. Where a", "settling of naturals; otherwise False (and the round continues). \"\"\" if not any((self.is_natural(),", "collected (discarded) automatically with player's hand self._reveal_hand() print(\"Dealer has a natural!\") elif not", "from this deck and added to the current hand object. face_dir : str", "cards may be removed from this deck and added to the dealer's hand", "the method calls '_validate_hand_status' that checks whether the hand is now bust and", "the hand is inactive (can't accept further cards). \"\"\" assert ( self.is_active() ),", "by the player holding the hand in the current round. Returns ------- bool", "if provided. Returns ------- empty_string : str An empty string, returned so that", "active in a round while cards can still be added to the hand.", "__init__ method of the base Hand class, initialising an empty hand object for", "Within a round of blackjack, cards are added to a hand when the", "return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining the properties and methods specific", "17 or goes bust. The dealer's final hand score is printed to the", "a natural, the round is over and they collect the bet of any", "followed by shorthand details of all cards currently within the hand. 
\"\"\" return", "ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining the properties and methods specific to", "the amount bet by a player against this hand: initially zero player_name =", "go bust (> 21), the hands '_active' attribute is set to False signalling", "a card to this hand: it is marked as inactive in the current", "is a collection of cards associated with either the dealer or a player", "each round is determined by the relative values of the player's and dealer's", "player who did not also get a natural. If a player gets a", "of hand (2 cards in hand and value = 21: returns True; otherwise", "bet is deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does", "and prints hand details to the screen.\"\"\" print(\"\\n---------------\") for card in self: if", "bets as necessary; returns True if round is concluded, otherwise False. A hand", ": blackjack.hand.PlayerHand A player's 'live' hand object. The 'natural' status of this hand", "sorted list. Parameters ---------- ace_count : int The number of ace cards to", "hand, the card is dealt face-down; otherwise face-up. If the method is called", "this deck and added to the dealer's hand object. player_hand : blackjack.hand.PlayerHand A", "2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and dealer:", "blackjack.deck.Deck The game's 'live' deck object - a card will be removed from", ": blackjack.player.Player The player object that owns the hand being initialised. The name", "(and the round continues). 
\"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete = False return", "no more cards in the current round.\"\"\" self._active = False def draw_card(self, deck_obj,", "\"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text is not", "specified, it behaves identically to the equivalent method on the base Hand class.", "the player loses, the method exits and their bet is lost. The value", "face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else:", "round, the dealer resolves this bet. Parameters ---------- amount : float The amount", "status of the hand in the current round (bust/stand = False; otherwise =", "hand face-up or face-down. By default, 'face_dir' is None when method is called", "participants have 'stood' or gone bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score =", "two cards with combined value of 21; otherwise False. \"\"\" return self._natural def", ": blackjack.deck.Deck The game's 'live' deck object - a card will be removed", "its best value > 17, the dealer stands. If < 17, the hand", "If the dealer currently has a single card in their hand, the card", "Returns ------- bool True when lowest possible hand value exceeds 21; otherwise False.", "from the player's balance. \"\"\" self._bet += amount def get_bet(self): \"\"\"Returns the amount", "possible in the current round, following the settling of naturals; otherwise False (and", "removed from the player's balance. \"\"\" self._bet += amount def get_bet(self): \"\"\"Returns the", "max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val for val in", "The value of the dealer's and player's hands are compared. If the player", "this bet. Parameters ---------- amount : float The amount bet against the hand", "a single round must be resolved automatically. 
\"\"\" def __init__(self): \"\"\"Calls the __init__", "can easily compare the relative scores. \"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value()", "] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A", "* face_down_count for value in hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\" Returns", "card is correctly face-down before it it is appended to the hand array.", "is appended to the hand array. Finally, the method calls '_validate_hand_status' that checks", "cards at the start of a round. If the dealer gets a natural,", "= self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value()", "in the current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card)", "they receive no winnings. If the player loses, the method exits and their", "except ValueError: best_value = None return best_value def is_active(self): \"\"\" As a boolean,", "in the dealer's hand. If the dealer currently has a single card in", "and added to the dealer's hand object. player_hand : blackjack.hand.PlayerHand A player's 'live'", "ace_count = 0 ace_values = None face_down_count = 0 non_ace_sum = 0 #", "def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card from the input deck and", "'bet_amount'. player_obj : blackjack.player.Player The player object that owns the input 'player_hand'. Where", "print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals and settles any", "method of an input deck object, the deck returns a single card object", "accordingly. 
Returns ------- round_complete : bool Returns True if no further actions are", "object: at the end of a round, the dealer resolves this bet. Parameters", "for val in all_hand_values if val <= max_best_value]) except ValueError: best_value = None", "def _verify_hand_status(self): \"\"\"Checks whether the hand is bust, has value equal to 21", "method can be called by the Hand class' __repr__ method which must return", "self._active = False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card from the", "face_down_count = 0 non_ace_sum = 0 # Loop: counts number of face-down cards", "the Hand class' __repr__ method which must return a string-like object. \"\"\" empty_string", "two cards at the start of a round. If the dealer gets a", "player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score:", "bet is collected (discarded) automatically with player's hand self._reveal_hand() print(\"Dealer has a natural!\")", "_verify_hand_status(self): \"\"\"Checks whether the hand is bust, has value equal to 21 or", "an empty hand object for a given participant. Parameters ---------- holder_name : str", "currently in the dealer's hand. If the dealer currently has a single card", "print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand is bust, has", "1 # The pause in seconds between drawn card actions twenty_one = 21", "= 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val for val in all_hand_values", "must be resolved automatically. \"\"\" def __init__(self): \"\"\"Calls the __init__ method of the", "(2 cards in hand and value = 21: returns True; otherwise False). 
Returns", "enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one", "bet of any player who did not also get a natural. If a", "and their balance is uneffected. The bet placed against their hand is lost", "specialised methods and attributes). Within a round of blackjack, cards are added to", "were added. Yields ------ card : blackjack.card.Card The next card in the hand", "max([val for val in all_hand_values if val <= max_best_value]) except ValueError: best_value =", "a payout is required, the amount bet against the hand is also read", "start of a round. If the dealer gets a natural, the round is", "the dealer has gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live'", "hand object. Where None, the orientation of the card is determined by the", "cards currently within the hand. Parameters ---------- alt_text : str This optional argument", "if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand()", "hand is a 'natural' if it contains two cards with a total value", "bust. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - cards", "of card objects making up the hand; initialised as an empty list self._active", "ValueError: best_value = None return best_value def is_active(self): \"\"\" As a boolean, returns", "value for both players class Hand: \"\"\" A class defining the properties and", "their player object is payed the value of their bet plus the original", "it contains two cards with a total value of 21. Players and dealers", "equal to 21 or is a natural. 
Updates hand status accordingly.\"\"\" natural_length =", "is printed to the screen or the player is informed that the dealer", "shorthand details of all cards currently within the hand. \"\"\" return self.print_hand() def", "the hand face-up or face-down. By default, 'face_dir' is None when method is", "hand statuses accordingly. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object", "Allows the player's hand to be printed for comparison as the dealer's hand", "'hit'. The outcome of each round is determined by the relative values of", "------- bool True when card contains two cards with combined value of 21;", "dealer or a player (each having their own respective subclasses with specialised methods", "held by a player. Players' hands are special because bets can be made", "with player's hand self._reveal_hand() print(\"Dealer has a natural!\") elif not self.is_natural() and player_hand.is_natural():", "the dealer's hand is resolved, the players score is printed each time the", "actions are possible in the current round, following the settling of naturals; otherwise", "list of card objects making up the hand; initialised as an empty list", "- cards may be removed from this deck and added to the dealer's", "a player against this hand: initially zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self,", "otherwise False. A hand is a 'natural' if it contains two cards with", "hand with orientation defined by 'face_dir'. 
Parameters ---------- deck_obj : blackjack.deck.Deck The game's", "self.is_bust() or (self.best_hand_value() == twenty_one and alt_text is not None) ): print(f\"Value: {self.hand_value()}\")", "__len__(self): \"\"\"Allows len() to be used on hand objects, returning the number of", "21 # Ideal score value for both players class Hand: \"\"\" A class", "= False # The bust status communicates whether the hand is bust (value", "this hand: initially zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds", "ace_count : int The number of ace cards to calculate possible summed values", "hand is now bust and updates all hand statuses accordingly. Parameters ---------- deck_obj", "hand value exceeds seventeen. Method initially checks the dealer's hand value: if its", "round must be resolved automatically. \"\"\" def __init__(self): \"\"\"Calls the __init__ method of", "'holder', of the hand object bseing created: either 'Player' or 'Dealer'. Defaults to", "to be printed for comparison as the dealer's hand is resolved. player_score_message :", "of integers. For hands with any cards face-down: returns a list of strings.", "hand. Where a payout is required, the amount bet against the hand is", "best_value def is_active(self): \"\"\" As a boolean, returns the active status of the", "their bet. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The", "best possible total value of the hand's constituent cards. If no hand value", "held by the dealer. The dealer's hand is unique because: the first card", "that communicates the players score. 
As the dealer's hand is resolved, the players", "hand_value_list : list of int / str A list containing all possible values", "hand contains face-down cards, this block adds the consistent face-down string to the", "first card dealt to the dealer will always be dealt face-down; the dealer's", "else: hand_value_list = [non_ace_sum] # Where the hand contains face-down cards, this block", "so that the 'print_hand' method can be called by the Hand class' __repr__", "# The pause in seconds between drawn card actions twenty_one = 21 #", "following the settling of naturals; otherwise False (and the round continues). \"\"\" if", "returns True if round is concluded, otherwise False. A hand is a 'natural'", "face-down: returns a list of strings. \"\"\" ace_count = 0 ace_values = None", "------ AssertionError Raised when the hand is inactive (can't accept further cards). \"\"\"", "thrown when 'is_ace' method encounters a face-down card try: if card.is_ace(bypass_face_down): ace_count +=", "returns True; otherwise False). Returns ------- bool True when lowest possible hand value", "self.is_active() ), \"Cannot draw a card to this hand: it is marked as", "or the player is informed that the dealer has gone bust. Parameters ----------", "= \"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj,", "amount bet against the hand is also read into 'bet_amount'. player_obj : blackjack.player.Player", "argument will be printed instead of the hand owner's name if provided. Returns", "'Player' for this base hand class. \"\"\" self._live_hand = ( [] ) #", "Any value of face_dir not spelling 'up' (case-insensitive) will add the card face-down.", "0 non_ace_sum = 0 # Loop: counts number of face-down cards in the", "is informed that the dealer has gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck", "to the value of the dealer's hand. 
Where a payout is required, the", "self._holder_name[-1].lower() == \"s\" if alt_text is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\")", "the hand value exceeds seventeen. Method initially checks the dealer's hand value: if", "verified as positive and has already been removed from the player's balance. \"\"\"", "exports the 'Hand' class, 'PlayerHand' and 'DealerHand' subclasses, and related methods. \"\"\" import", "draw no more cards in the current round.\"\"\" self._active = False def draw_card(self,", "[ ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set +", "The game's 'live' deck object - a card will be removed from this", "counts face-up aces; sums face-up cards that aren't an ace for card in", "ace_sum = [ possibility + non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list =", "returns a list of strings. \"\"\" ace_count = 0 ace_values = None face_down_count", "AssertionErrors thrown when 'is_ace' method encounters a face-down card try: if card.is_ace(bypass_face_down): ace_count", "is_natural(self): \"\"\" As a boolean, returns 'natural' status of hand (2 cards in", "between drawn card actions twenty_one = 21 # Ideal score value for both", "natural. Updates hand status accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is None: self._bust", "attribute holding the amount bet by a player against this hand: initially zero", "the card is correctly face-down before it it is appended to the hand", "with specialised methods and attributes). Within a round of blackjack, cards are added", "The number of ace cards to calculate possible summed values for. ace_values :", "player_hand.is_natural())): # Stand-off between player and dealer: player's bet is deposited back into", "False. 
Returns ------- hand_value_list : list of int / str A list containing", "__init__(self): \"\"\"Calls the __init__ method of the base Hand class, initialising an empty", "is read and compared to the value of the dealer's hand. Where a", "to inactive: triggered when player chooses to draw no more cards in the", "bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score = 0 else: dealer_score = self.best_hand_value()", "# Ideal score value for both players class Hand: \"\"\" A class defining", "before it it is appended to the hand array. Finally, the method calls", ": bool Tells method whether to include face-down cards in calculating the value(s)", "AssertionError: face_down_count += 1 # This if-else block defines a list of possible", "face-down string to the face-up values if face_down_count > 0: hand_value_list = [", "consistent face-down string to the face-up values if face_down_count > 0: hand_value_list =", "self._active def is_bust(self): \"\"\" As a boolean, returns 'bust' status of hand in", "drawn card (face-up by default) calls its 'flip_card' method to ensure the card", "initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The value", "---------- holder_name : str Defines the owner, or 'holder', of the hand object", "face_dir = 'up'. Any value of face_dir not spelling 'up' (case-insensitive) will add", "round continues). \"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete else:", "used on hand objects, returning the number of cards in the hand as", "21; otherwise False. 
\"\"\" return self._bust def is_natural(self): \"\"\" As a boolean, returns", "the current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status()", "int / str A list containing all possible values the hand's combination of", "in the hand; counts face-up aces; sums face-up cards that aren't an ace", "# Where the hand contains face-down cards, this block adds the consistent face-down", "has a natural!\") elif not self.is_natural() and player_hand.is_natural(): # Player wins 1.5x their", "hand with all cards face-up: returns a list of integers. For hands with", "Method settles any bets at the end of the round; where the player", "triggers this method, printing all hand details. Returns ------- Output of 'print_hand' method", "method to ensure the card is correctly face-down before it it is appended", "A two-element tuple containing the possible card values an ace can take e.g.", "to the dealer's hand object. player_hand : blackjack.hand.PlayerHand A player's 'live' hand object.", "blackjack, cards are added to a hand when the associated player chooses to", "of cards in the hand as the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self,", "+= 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count +=", "number of possible ace values (additional loop over keys of dict?) \"\"\" ace_sum_possibilities", "( [] ) # A list of card objects making up the hand;", "return best_value def is_active(self): \"\"\" As a boolean, returns the active status of", "the properties and methods specific to a hand object held by a player.", "Parameters ---------- amount : float The amount bet against the hand object. In", "If the method is called with face_dir specified, it behaves identically to the", "and updates all hand statuses accordingly. 
Parameters ---------- deck_obj : blackjack.deck.Deck The game's", "get_bet(self): \"\"\"Returns the amount bet against this player's hand as a float.\"\"\" return", "is not None) ): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether the", "whether card is added to the hand face-up or face-down. By default, 'face_dir'", "player_hand.is_natural())): round_complete = False return round_complete else: round_complete = True bet_amount = player_hand.get_bet()", "hand to be dealt face-down, the freshly drawn card (face-up by default) calls", "the owner, or 'holder', of the hand object bseing created: either 'Player' or", "hand's current value, or if they go bust (> 21), the hands '_active'", "they collect the bet of any player who did not also get a", "= True self.stand() elif self.best_hand_value() == twenty_one: self.stand() if len(self) == natural_length: self._natural", "0 # Loop: counts number of face-down cards in the hand; counts face-up", "values (additional loop over keys of dict?) \"\"\" ace_sum_possibilities = [0] for ace_idx", "placed against their hand is lost when a new round starts and new", "bust status communicates whether the hand is bust (value > 21) in the", "---------- amount : float The amount bet against the hand object. In typical", "is concluded, otherwise False. A hand is a 'natural' if it contains two", "'Hand' class, 'PlayerHand' and 'DealerHand' subclasses, and related methods. \"\"\" import time draw_delay", "defined by 'face_dir'. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object", "for a given participant. Parameters ---------- holder_name : str Defines the owner, or", "# The bust status communicates whether the hand is bust (value > 21)", "face-down; otherwise face-up. 
If the method is called with face_dir specified, it behaves", "or a player (each having their own respective subclasses with specialised methods and", "any number of possible ace values (additional loop over keys of dict?) \"\"\"", "deck and added to the dealer's hand object. player_hand : blackjack.hand.PlayerHand A player's", "card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects", "that aren't an ace for card in self: # Try statement catches AssertionErrors", "if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj):", "the number of cards currently in the dealer's hand. If the dealer currently", "Method initially checks the dealer's hand value: if its best value > 17,", "Where the hand contains face-down cards, this block adds the consistent face-down string", "payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method settles any bets at", "face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method automatically resolves the dealer's", "player's 'live' hand object. The 'natural' status of this hand is read and", "face-up aces; sums face-up cards that aren't an ace for card in self:", "ace_count += 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count", "bust (> 21), the hands '_active' attribute is set to False signalling that", "in the order they were added. Yields ------ card : blackjack.card.Card The next", "the hand with orientation defined by 'face_dir'. 
Parameters ---------- deck_obj : blackjack.deck.Deck The", "no further actions are possible in the current round, following the settling of", "in the current round; otherwise False. \"\"\" return self._active def is_bust(self): \"\"\" As", "0 ace_values = None face_down_count = 0 non_ace_sum = 0 # Loop: counts", "else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount *", "int A list containing each value 'ace_count' number of aces can combine to", "face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\"", "initially checks the dealer's hand value: if its best value > 17, the", "best value > 17, the dealer stands. If < 17, the hand draws", "False. \"\"\" return self._bust def is_natural(self): \"\"\" As a boolean, returns 'natural' status", "As the dealer's hand is resolved, the players score is printed each time", "len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s) of the target hand", "dealer gets a natural, the round is over and they collect the bet", "by the relative values of the player's and dealer's hands. \"\"\" def __init__(self,", "a list of strings. \"\"\" ace_count = 0 ace_values = None face_down_count =", "the screen.\"\"\" print(\"\\n---------------\") for card in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals", "added. Yields ------ card : blackjack.card.Card The next card in the hand (within", "to False. Returns ------- hand_value_list : list of int / str A list", "time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if", "\"\"\" As a boolean, returns 'natural' status of hand (2 cards in hand", "can combine to make. 
TODO: Refactor to allow any number of possible ace", "by the number of cards currently in the dealer's hand. If the dealer", "flow, this bet amount has already been verified as positive and has already", "else: dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet()", "\"\"\" Method settles any bets at the end of the round; where the", "values of the player's and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises", "cards currently within the hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len() to", "the dealer's hand value: if its best value > 17, the dealer stands.", "returns True; otherwise False). Returns ------- bool True when card contains two cards", "hand. If the dealer currently has a single card in their hand, the", "informed that the dealer has gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck The", "ace_idx in range(ace_count): first_set = [ ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities", "'player_hand'. Where a payout is required, this player's balance will be updated accordingly.", "object that owns the hand being initialised. The name of this player is", "of ace cards to calculate possible summed values for. ace_values : tuple A", "player and dealer: player's bet is deposited back into balance print(f\"\\n{player_obj.get_name()} has a", "block defines a list of possible values associated with all face-up cards in", "---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - a card will", "else: round_complete = True bet_amount = player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): #", "the dealer's turn in a single round must be resolved automatically. 
\"\"\" def", "cards in the hand if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum", "ends and bet is collected (discarded) automatically with player's hand self._reveal_hand() print(\"Dealer has", "a 'natural' if it contains two cards with a total value of 21.", "balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's a stand-off!\")", "which must return a string-like object. \"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower()", ": str This optional argument will be printed instead of the hand owner's", "the input deck and adds this card to the hand with orientation defined", "if self.is_natural() and not player_hand.is_natural(): # No action, round ends and bet is", "receive cards in the current round; otherwise False. \"\"\" return self._active def is_bust(self):", "is deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does the", "in hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\" Returns the best possible value", "value <= 21, 'best_value' = None. \"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True)", "round ends and bet is collected (discarded) automatically with player's hand self._reveal_hand() print(\"Dealer", "has a natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's a stand-off!\") payout_multiplier =", "resolves this bet. Parameters ---------- amount : float The amount bet against the", "The pause in seconds between drawn card actions twenty_one = 21 # Ideal", "deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints", "otherwise False. 
\"\"\" return self._active def is_bust(self): \"\"\" As a boolean, returns 'bust'", "the total value(s) of the target hand by summing the values of all", "list of strings. \"\"\" ace_count = 0 ace_values = None face_down_count = 0", "status of the dealer's hand. Where a payout is required, the amount bet", "hand_value_list = [non_ace_sum] # Where the hand contains face-down cards, this block adds", "now bust and updates all hand statuses accordingly. Parameters ---------- deck_obj : blackjack.deck.Deck", "in self._live_hand: yield card def __repr__(self): \"\"\" Entering the reference for a hand", "object, the deck returns a single card object and deletes this card from", "possible value of the hand as an integer. If hand value is bust", "ace_sum_possibilities ] second_set = [ ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities ]", "deck_obj : blackjack.deck.Deck The game's 'live' deck object - cards may be removed", "face-up and prints hand details to the screen.\"\"\" print(\"\\n---------------\") for card in self:", "by the Hand class' __repr__ method which must return a string-like object. \"\"\"", "take e.g. (1, 11). Returns ------- ace_sum_possibilities : list of int A list", "hands '_active' attribute is set to False signalling that no further actions are", "'face_dir'. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - a", "is required, the amount bet against the hand is also read into 'bet_amount'.", "containing the possible card values an ace can take e.g. (1, 11). Returns", "balance will be updated accordingly. Returns ------- round_complete : bool Returns True if", "the hand in the current round. Returns ------- bool True when hand can", "this player's balance will be updated accordingly. 
Returns ------- round_complete : bool Returns", "ace_sum_possibilities : list of int A list containing each value 'ace_count' number of", "making up the hand; initialised as an empty list self._active = True #", "as positive and has already been removed from the player's balance. \"\"\" self._bet", "def hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s) of the target hand by", "accept further cards). \"\"\" assert ( self.is_active() ), \"Cannot draw a card to", "hand is bust, has value equal to 21 or is a natural. Updates", "hand owner's name if provided. Returns ------- empty_string : str An empty string,", "dealer_score > player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier", "False # The natural status communicates whether the hand is a natural (value", "empty_string : str An empty string, returned so that the 'print_hand' method can", "properties and methods of a hand object. A hand object is a collection", "{idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one and alt_text", "input 'player_hand'. Where a payout is required, this player's balance will be updated", "= player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): # No action, round ends and", "of cards associated with either the dealer or a player (each having their", "== twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target:", "> 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility + non_ace_sum for", "paid 1.5x the value of their bet. 
Parameters ---------- player_hand : blackjack.hand.PlayerHand A", "'is_ace' method encounters a face-down card try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values", "card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 # This if-else block defines a list", "add_bet(self, amount): \"\"\" Adds a bet made by a player to the current", "= True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values of a", "the current round, following the settling of naturals; otherwise False (and the round", "final hand score is printed to the screen or the player is informed", "0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility + non_ace_sum for possibility", "face-down card try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum", "the 'face_dir' input argument requires the hand to be dealt face-down, the freshly", "read and compared to the status of the dealer's hand. Where a payout", "value of the hand as an integer. If hand value is bust (>", "class. \"\"\" self._live_hand = ( [] ) # A list of card objects", "strings. \"\"\" ace_count = 0 ace_values = None face_down_count = 0 non_ace_sum =", "for ace_sum_element in ace_sum_possibilities ] second_set = [ ace_values[1] + ace_sum_element for ace_sum_element", "called with face_dir specified, it behaves identically to the equivalent method on the", "[non_ace_sum] # Where the hand contains face-down cards, this block adds the consistent", "\"\"\" A subclass defining the properties and methods specific to a hand object", "a bet made by a player to the current hand object: at the", "status to inactive: triggered when player chooses to draw no more cards in", "of all cards currently within the hand. Parameters ---------- alt_text : str This", "calculate possible summed values for. 
ace_values : tuple A two-element tuple containing the", "------- ace_sum_possibilities : list of int A list containing each value 'ace_count' number", "card is determined by the number of cards currently in the dealer's hand.", "printing hand details to screen. \"\"\" self._bet = float( 0 ) # An", "the method exits and their balance is uneffected. The bet placed against their", "player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value()", "break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns", "hand object. face_dir : None / str Defines whether card is added to", "print(\"Dealer has a natural!\") elif not self.is_natural() and player_hand.is_natural(): # Player wins 1.5x", "the player wins, their player object is payed the value of their bet", "The best possible total value of the hand's constituent cards. If no hand", "The name of this player is queried and set used to define the", "self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() >", "hand objects, returning the number of cards in the hand as the object", "idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or", "unique because: the first card dealt to the dealer will always be dealt", "for a hand object in the terminal triggers this method, printing all hand", "hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s) of the target hand by summing", "/ str A list containing all possible values the hand's combination of cards", "int The number of ace cards to calculate possible summed values for. 
ace_values", "method automatically resolves the dealer's hand: drawing cards until the hand value exceeds", "constituent cards. If no hand value <= 21, 'best_value' = None. \"\"\" max_best_value", "object is payed the value of their bet plus the original bet amount", "returns 'natural' status of hand (2 cards in hand and value = 21:", "player_hand.is_natural(): # No action, round ends and bet is collected (discarded) automatically with", "player holding the hand in the current round. Returns ------- bool True when", "whether the hand is a natural (value = 21 with 2 cards) self._holder_name", "by a player against this hand: initially zero player_name = player_obj.get_name() super().__init__(player_name) def", "possible card values an ace can take e.g. (1, 11). Returns ------- ace_sum_possibilities", "default, the card will be added face-up with face_dir = 'up'. Any value", "this hand is read and compared to the status of the dealer's hand.", "round is over and they collect the bet of any player who did", "card dealt to the dealer will always be dealt face-down; the dealer's turn", "original bet amount is returned. If it's a draw, the bet is returned", "in the current round (bust/stand = False; otherwise = True). A hand is", "card actions twenty_one = 21 # Ideal score value for both players class", "blackjack.card.Card The next card in the hand (within the hand object's '_live_hand' attribute).", "be removed from this deck and added to the dealer's hand object. face_dir", "to 'stand' at their hand's current value, or if they go bust (>", "player to the current hand object: at the end of a round, the", "the hand is now bust and updates all hand statuses accordingly. Parameters ----------", ": blackjack.hand.PlayerHand A player's 'live' hand object. Allows the player's hand to be", "be removed from this deck and added to the dealer's hand object. player_hand", "Ideal score value for both players class Hand: \"\"\" A class defining the", "otherwise False. 
\"\"\" return self._bust def is_natural(self): \"\"\" As a boolean, returns 'natural'", "Where a payout is required, the amount bet against the hand is also", "Defaults to False. Returns ------- hand_value_list : list of int / str A", "possible hand value exceeds 21; otherwise False. \"\"\" return self._bust def is_natural(self): \"\"\"", "to the current hand object: at the end of a round, the dealer", "object for the player. Parameters ---------- player_obj : blackjack.player.Player The player object that", "Returns ------- bool True when card contains two cards with combined value of", "ace cards as a sorted list. Parameters ---------- ace_count : int The number", "actions twenty_one = 21 # Ideal score value for both players class Hand:", "can take e.g. (1, 11). Returns ------- ace_sum_possibilities : list of int A", "value(s) of the hand. Defaults to False. Returns ------- hand_value_list : list of", "of the base Hand class, initialising an empty hand object for the dealer.\"\"\"", "face-up values if face_down_count > 0: hand_value_list = [ str(value) + \" +", "initialising an empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None):", "bool Tells method whether to include face-down cards in calculating the value(s) of", "is over and they collect the bet of any player who did not", "ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\"", "drawn card actions twenty_one = 21 # Ideal score value for both players", "\"\"\"Returns the amount bet against this player's hand as a float.\"\"\" return self._bet", "if round is concluded, otherwise False. 
A hand is a 'natural' if it", "not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\"", "to a hand when the associated player chooses to 'hit'. The outcome of", "natural. If a player gets a natural and the dealer did not, they", "'deal_card' method of an input deck object, the deck returns a single card", "value of their bet plus the original bet amount is returned. If it's", "== twenty_one and alt_text is not None) ): print(f\"Value: {self.hand_value()}\") return empty_string def", "Returns the possible values of a collection of ace cards as a sorted", ": None / str Defines whether card is added to the hand face-up", "\"\"\" assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled between the", "== natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible", "for comparison as the dealer's hand is resolved. player_score_message : str A string", "defining the properties and methods specific to a hand object held by the", "a single card object and deletes this card from the deck. If the", "defines a list of possible values associated with all face-up cards in the", "---------- ace_count : int The number of ace cards to calculate possible summed", "\"Bets cannot be settled between the dealer and a player unless both participants", "default, 'face_dir' is None when method is called against a dealer's hand object.", "possible values the hand's combination of cards can take with no duplicates. For", "ace can take e.g. (1, 11). Returns ------- ace_sum_possibilities : list of int", "draw a card to this hand: it is marked as inactive in the", "the hand. Defaults to False. Returns ------- hand_value_list : list of int /", "all hand statuses accordingly. 
Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck", "The player object that owns the input 'player_hand'. Where a payout is required,", "best_value : int or None The best possible total value of the hand's", "have 'stood' or gone bust.\" if player_hand.is_bust(): return if self.is_bust(): dealer_score = 0", "---------- bypass_face_down : bool Tells method whether to include face-down cards in calculating", "return a string-like object. \"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower() == \"s\"", "player's and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand", "is bust (value > 21) in the current round self._natural = False #", "bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier)", "it behaves identically to the equivalent method on the base Hand class. \"\"\"", "card to this hand: it is marked as inactive in the current round.\"", "status of hand in the current round (value > 21: returns True; otherwise", "player loses, the method exits and their bet is lost. The value of", "Stand-off between player and dealer: player's bet is deposited back into balance print(f\"\\n{player_obj.get_name()}", "aren't an ace for card in self: # Try statement catches AssertionErrors thrown", "list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining the", "---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The value of this", "The player object that owns the hand being initialised. The name of this", "twenty_one and alt_text is not None) ): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self):", "hand in the current round. Returns ------- bool True when hand can still", "values of all constituent card objects. 
Parameters ---------- bypass_face_down : bool Tells method", "summing the values of all constituent card objects. Parameters ---------- bypass_face_down : bool", "the players score. As the dealer's hand is resolved, the players score is", "screen or the player is informed that the dealer has gone bust. Parameters", "naturals upon drawing their first two cards at the start of a round.", "__repr__(self): \"\"\" Entering the reference for a hand object in the terminal triggers", "player_obj): \"\"\" Method detects naturals and settles any bets as necessary; returns True", "printed each time the dealer's hand is printed so the user can easily", "True when lowest possible hand value exceeds 21; otherwise False. \"\"\" return self._bust", "\"\"\" assert ( self.is_active() ), \"Cannot draw a card to this hand: it", "order they were added. Yields ------ card : blackjack.card.Card The next card in", "dealt face-down; the dealer's turn in a single round must be resolved automatically.", "zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a bet made", "the round is over and they collect the bet of any player who", "gets a natural and the dealer did not, they are immediately paid 1.5x", "cards) self._holder_name = holder_name def __iter__(self): \"\"\" Allows hand objects to be iterated", "dealer's hand object. player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. Allows the", "<= 21, 'best_value' = None. \"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try:", "a payout is required, this player's balance will be updated accordingly. \"\"\" assert", "no winnings. If the player loses, the method exits and their balance is", "None face_down_count = 0 non_ace_sum = 0 # Loop: counts number of face-down", "end of a round, the dealer resolves this bet. Parameters ---------- amount :", "still receive cards in the current round; otherwise False. 
\"\"\" return self._active def", "for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card from", "otherwise False. \"\"\" return self._natural def stand(self): \"\"\"Updates hand status to inactive: triggered", "the hand as an integer. If hand value is bust (> 21), returns", "> 21) in the current round self._natural = False # The natural status", "or (self.best_hand_value() == twenty_one and alt_text is not None) ): print(f\"Value: {self.hand_value()}\") return", "got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\")", "orientation defined by 'face_dir'. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck", "current round.\"\"\" self._active = False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card", "the original bet amount is returned. If it's a draw, the bet is", "Returns True if no further actions are possible in the current round, following", "dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if", "if it contains two cards with a total value of 21. Players and", "In typical game flow, this bet amount has already been verified as positive", "= [non_ace_sum] # Where the hand contains face-down cards, this block adds the", "face_dir=\"up\"): \"\"\" Removes one card from the input deck and adds this card", "player chooses to 'hit'. The outcome of each round is determined by the", "An empty string, returned so that the 'print_hand' method can be called by", "of the hand. Defaults to False. Returns ------- hand_value_list : list of int", "return empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand is bust, has value equal", "number of cards currently in the dealer's hand. 
If the dealer currently has", "None The best possible total value of the hand's constituent cards. If no", "the dealer. The dealer's hand is unique because: the first card dealt to", "class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir = \"down\"", "to 'hit'. The outcome of each round is determined by the relative values", "equivalent method on the base Hand class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif", "the base Hand class, initialising an empty hand object for the player. Parameters", "be dealt face-down, the freshly drawn card (face-up by default) calls its 'flip_card'", "appended to the hand array. Finally, the method calls '_validate_hand_status' that checks whether", "of their bet. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object.", "the status of the dealer's hand. Where a payout is required, the amount", "optional argument will be printed instead of the hand owner's name if provided.", "and bet is collected (discarded) automatically with player's hand self._reveal_hand() print(\"Dealer has a", "Player wins 1.5x their original bet; multiplier is 2.5x so bet amount is", "a given participant. Parameters ---------- holder_name : str Defines the owner, or 'holder',", "subclasses with specialised methods and attributes). Within a round of blackjack, cards are", "self._bet = float( 0 ) # An attribute holding the amount bet by", "in the hand (within the hand object's '_live_hand' attribute). \"\"\" for card in", "the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the total value(s)", "True self.stand() elif self.best_hand_value() == twenty_one: self.stand() if len(self) == natural_length: self._natural =", "screen. 
\"\"\" self._bet = float( 0 ) # An attribute holding the amount", "to False signalling that no further actions are required by the player holding", "otherwise False (and the round continues). \"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete =", "the hand in the current round (bust/stand = False; otherwise = True). A", "compare the relative scores. \"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one:", "of a hand object. A hand object is a collection of cards associated", "accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is None: self._bust = True self.stand() elif", "aces; sums face-up cards that aren't an ace for card in self: #", "== \"s\" if alt_text is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else:", "method whether to include face-down cards in calculating the value(s) of the hand.", "player's bet is deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo", "or face-down. By default, 'face_dir' is None when method is called against a", "then displayed when printing hand details to screen. \"\"\" self._bet = float( 0", "ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass defining the properties and methods", "the current round self._bust = False # The bust status communicates whether the", "hand objects to be iterated over, yielding constituent card objects in the order", "is lost. The value of the dealer's and player's hands are compared. 
If", "* payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class", "best_value = max([val for val in all_hand_values if val <= max_best_value]) except ValueError:", "natural and the dealer did not, they are immediately paid 1.5x the value", "by a player to the current hand object: at the end of a", "back into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's", "the freshly drawn card (face-up by default) calls its 'flip_card' method to ensure", "self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else:", "also read into 'bet_amount'. player_obj : blackjack.player.Player The player object that owns the", "in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or (self.best_hand_value() ==", "to the current hand object. face_dir : str Defines whether card is added", "(value > 21: returns True; otherwise False). Returns ------- bool True when lowest", "used to define the '_holder_name' attribute on the base class. This name is", "face-up cards that aren't an ace for card in self: # Try statement", "and alt_text is not None) ): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks", "dealer will always be dealt face-down; the dealer's turn in a single round", "deck_obj, face_dir=\"up\"): \"\"\" Removes one card from the input deck and adds this", "card try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum +=", "of all constituent card objects. 
Parameters ---------- bypass_face_down : bool Tells method whether", "As a boolean, returns 'bust' status of hand in the current round (value", "card objects. Parameters ---------- bypass_face_down : bool Tells method whether to include face-down", "has gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object", "the player's balance but they receive no winnings. If the player loses, the", "the hand's constituent cards. If no hand value <= 21, 'best_value' = None.", "the hand's combination of cards can take with no duplicates. For a hand", "class, 'PlayerHand' and 'DealerHand' subclasses, and related methods. \"\"\" import time draw_delay =", "this card from the deck. If the 'face_dir' input argument requires the hand", "in ace_sum_possibilities ] second_set = [ ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities", "self._natural = False # The natural status communicates whether the hand is a", "card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method", "object - a card will be removed from this deck and added to", "stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone", "class. This name is then displayed when printing hand details to screen. \"\"\"", "__repr__ method which must return a string-like object. 
\"\"\" empty_string = \"\" ends_with_s", "self._live_hand = ( [] ) # A list of card objects making up", "hand self._reveal_hand() print(\"Dealer has a natural!\") elif not self.is_natural() and player_hand.is_natural(): # Player", "amount is also deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does", "the consistent face-down string to the face-up values if face_down_count > 0: hand_value_list", "a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self, player_hand,", "self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one and alt_text is not None) ):", "bets at the end of the round; where the player loses, the method", "empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand is bust, has value equal to", "def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand object for a given participant.", "------- hand_value_list : list of int / str A list containing all possible", "self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer", "= ( [] ) # A list of card objects making up the", "hand object. In typical game flow, this bet amount has already been verified", "of blackjack, cards are added to a hand when the associated player chooses", "def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card from the input deck and", "hand (2 cards in hand and value = 21: returns True; otherwise False).", "the dealer! 
It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete", "a boolean, returns 'bust' status of hand in the current round (value >", "a boolean, returns 'natural' status of hand (2 cards in hand and value", "17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active():", "the '_holder_name' attribute on the base class. This name is then displayed when", "is also deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\")", "of each round is determined by the relative values of the player's and", "players score. As the dealer's hand is resolved, the players score is printed", "of the hand object bseing created: either 'Player' or 'Dealer'. Defaults to 'Player'", "True when hand can still receive cards in the current round; otherwise False.", "21), returns None. Returns ------- best_value : int or None The best possible", "seconds between drawn card actions twenty_one = 21 # Ideal score value for", "= False; otherwise = True). A hand is regarded as active in a", "A player's 'live' hand object. The 'natural' status of this hand is read", "cards until its value exceeds 17 or goes bust. The dealer's final hand", "value equal to 21 or is a natural. Updates hand status accordingly.\"\"\" natural_length", "> dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier", "As a boolean, returns 'natural' status of hand (2 cards in hand and", "Parameters ---------- holder_name : str Defines the owner, or 'holder', of the hand", "more cards in the current round.\"\"\" self._active = False def draw_card(self, deck_obj, face_dir=\"up\"):", "the player is informed that the dealer has gone bust. 
Parameters ---------- deck_obj", "cards with combined value of 21; otherwise False. \"\"\" return self._natural def stand(self):", "can take with no duplicates. For a hand with all cards face-up: returns", "currently has a single card in their hand, the card is dealt face-down;", "any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete else: round_complete = True bet_amount =", "method : str Prints the hand's owner followed by shorthand details of all", "hand is printed so the user can easily compare the relative scores. \"\"\"", "def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals and settles any bets as", "single round must be resolved automatically. \"\"\" def __init__(self): \"\"\"Calls the __init__ method", "ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility + non_ace_sum for possibility in", "possibility in ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list = [non_ace_sum] # Where", "round_complete = False return round_complete else: round_complete = True bet_amount = player_hand.get_bet() if", "to 'Player' for this base hand class. \"\"\" self._live_hand = ( [] )", "21 or is a natural. Updates hand status accordingly.\"\"\" natural_length = 2 if", "ace_values : tuple A two-element tuple containing the possible card values an ace", "ace_sum_possibilities = [0] for ace_idx in range(ace_count): first_set = [ ace_values[0] + ace_sum_element", "or self.is_bust() or (self.best_hand_value() == twenty_one and alt_text is not None) ): print(f\"Value:", "self._reveal_hand() print(\"\\nSo does the dealer! It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount *", "special because bets can be made against these hands. \"\"\" def __init__(self, player_obj):", "be updated accordingly. 
Returns ------- round_complete : bool Returns True if no further", "are added to a hand when the associated player chooses to 'hit'. The", "None) ): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand is", "the 'deal_card' method of an input deck object, the deck returns a single", "Players' hands are special because bets can be made against these hands. \"\"\"", "possible ace values (additional loop over keys of dict?) \"\"\" ace_sum_possibilities = [0]", "both players class Hand: \"\"\" A class defining the properties and methods of", "single card in their hand, the card is dealt face-down; otherwise face-up. If", "a round of blackjack, cards are added to a hand when the associated", "current hand object: at the end of a round, the dealer resolves this", "dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's", "user can easily compare the relative scores. \"\"\" dealer_target = 17 print(player_score_message) if", "objects making up the hand; initialised as an empty list self._active = True", "in the hand as the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\"", "<= max_best_value]) except ValueError: best_value = None return best_value def is_active(self): \"\"\" As", "turn in a single round must be resolved automatically. \"\"\" def __init__(self): \"\"\"Calls", "payout is required, this player's balance will be updated accordingly. Returns ------- round_complete", "the hand is also read into 'bet_amount'. 
player_obj : blackjack.player.Player The player object", "outcome of each round is determined by the relative values of the player's", "super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card from the input deck", "(dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): #", "second_set = [ ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities =", "ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 #", "and adds this card to the hand with orientation defined by 'face_dir'. Calls", "2.5x so bet amount is also deposited back into balance print(f\"\\n{player_obj.get_name()} has a", "value exceeds 21; otherwise False. \"\"\" return self._bust def is_natural(self): \"\"\" As a", "all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val for val in all_hand_values if val", "False. \"\"\" return self._active def is_bust(self): \"\"\" As a boolean, returns 'bust' status", "number of cards in the hand as the object 'length'.\"\"\" return len(self._live_hand) def", "details of all cards currently within the hand. Parameters ---------- alt_text : str", "dealt face-down; otherwise face-up. If the method is called with face_dir specified, it", "hand class. \"\"\" self._live_hand = ( [] ) # A list of card", "print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self):", "dealer's hand. Where a payout is required, the amount bet against the hand", "of dict?) 
\"\"\" ace_sum_possibilities = [0] for ace_idx in range(ace_count): first_set = [", "hand; initialised as an empty list self._active = True # The active status", "and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand object", "(1, 11). Returns ------- ace_sum_possibilities : list of int A list containing each", "if len(self) == natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns", "currently within the hand. Parameters ---------- alt_text : str This optional argument will", "no further actions are required by the player holding the hand in the", "active status of the hand in the current round (bust/stand = False; otherwise", "object. face_dir : None / str Defines whether card is added to the", "face_dir not spelling 'up' (case-insensitive) will add the card face-down. Raises ------ AssertionError", "of the round; where the player loses, the method exits and their bet", "players score is printed each time the dealer's hand is printed so the", "= [ possibility + non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list = ace_sum", "if they go bust (> 21), the hands '_active' attribute is set to", "hand_value_list = ace_sum else: hand_value_list = [non_ace_sum] # Where the hand contains face-down", "len(self) == natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the", "{self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\")", "print(\"\\nSo does the dealer! 
It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier)", "round is determined by the relative values of the player's and dealer's hands.", "gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in the hand", "cards. If no hand value <= 21, 'best_value' = None. \"\"\" max_best_value =", "to be dealt face-down, the freshly drawn card (face-up by default) calls its", "= [ ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set", "= {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message)", "stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj):", "combine to make. TODO: Refactor to allow any number of possible ace values", "this base hand class. \"\"\" self._live_hand = ( [] ) # A list", "list self._active = True # The active status communicates whether the hand is", "because bets can be made against these hands. \"\"\" def __init__(self, player_obj): \"\"\"", "continues). \"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete else: round_complete", "class Hand: \"\"\" A class defining the properties and methods of a hand", "duplicates. For a hand with all cards face-up: returns a list of integers.", "of the hand as an integer. If hand value is bust (> 21),", "round, following the settling of naturals; otherwise False (and the round continues). \"\"\"", "It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self,", "the current round (bust/stand = False; otherwise = True). 
A hand is regarded", "dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining", "\"\"\" Initialises an empty hand object for a given participant. Parameters ---------- holder_name", "in ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list = [non_ace_sum] # Where the", "= None. \"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val", "or is a natural. Updates hand status accordingly.\"\"\" natural_length = 2 if self.best_hand_value()", "player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and dealer: player's", "input argument requires the hand to be dealt face-down, the freshly drawn card", "possible values associated with all face-up cards in the hand if ace_count >", "card will be added face-up with face_dir = 'up'. Any value of face_dir", "17, the dealer stands. If < 17, the hand draws cards until its", "self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility + non_ace_sum for possibility in ace_sum_possibilities ]", "participant. Parameters ---------- holder_name : str Defines the owner, or 'holder', of the", "any player who did not also get a natural. If a player gets", "( self.is_active() ), \"Cannot draw a card to this hand: it is marked", "empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes", "checks the dealer's hand value: if its best value > 17, the dealer", "object. A hand object is a collection of cards associated with either the", "cards can take with no duplicates. For a hand with all cards face-up:", "The amount bet against the hand object. 
In typical game flow, this bet", "print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active()", "a round. If the dealer gets a natural, the round is over and", "a natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's a stand-off!\") payout_multiplier = 1", "round starts and new hands are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A", "aces can combine to make. TODO: Refactor to allow any number of possible", "the hand's owner followed by shorthand details of all cards currently within the", "in the current round, following the settling of naturals; otherwise False (and the", "properties and methods specific to a hand object held by a player. Players'", ": tuple A two-element tuple containing the possible card values an ace can", "hand details. Returns ------- Output of 'print_hand' method : str Prints the hand's", "hand. Once a player decides to 'stand' at their hand's current value, or", "(value > 21) in the current round self._natural = False # The natural", "the hand. Once a player decides to 'stand' at their hand's current value,", "True # The active status communicates whether the hand is still active in", "by 'face_dir'. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object -", "card in their hand, the card is dealt face-down; otherwise face-up. 
If the", "self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score =", "further actions are possible in the current round, following the settling of naturals;", "+ ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort()", "Allows hand objects to be iterated over, yielding constituent card objects in the", "ensure the card is correctly face-down before it it is appended to the", "are required by the player holding the hand in the current round. Returns", "Parameters ---------- ace_count : int The number of ace cards to calculate possible", "player_hand.is_bust(): return if self.is_bust(): dealer_score = 0 else: dealer_score = self.best_hand_value() if dealer_score", "define the '_holder_name' attribute on the base class. This name is then displayed", "all cards in the hand face-up and prints hand details to the screen.\"\"\"", "is inactive (can't accept further cards). \"\"\" assert ( self.is_active() ), \"Cannot draw", "str This optional argument will be printed instead of the hand owner's name", "face_dir : str Defines whether card is added to the hand face-up or", "the deck. If the 'face_dir' input argument requires the hand to be dealt", "integers. For hands with any cards face-down: returns a list of strings. \"\"\"", "status of hand (2 cards in hand and value = 21: returns True;", "the relative scores. \"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've", "bet amount is also deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer", "list. 
Parameters ---------- ace_count : int The number of ace cards to calculate", "hand object: at the end of a round, the dealer resolves this bet.", "1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count += 1", "be called by the Hand class' __repr__ method which must return a string-like", "player's hands are compared. If the player wins, their player object is payed", "< dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\")", "\"\"\" As a boolean, returns the active status of the hand in the", "score is printed to the screen or the player is informed that the", "is collected (discarded) automatically with player's hand self._reveal_hand() print(\"Dealer has a natural!\") elif", "receive no winnings. If the player loses, the method exits and their balance", "hand can still receive cards in the current round; otherwise False. \"\"\" return", "can be made against these hands. \"\"\" def __init__(self, player_obj): \"\"\" Calls the", "hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand object for a", "the card is determined by the number of cards currently in the dealer's", "hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\" Returns the best possible value of", "------- best_value : int or None The best possible total value of the", "If no hand value <= 21, 'best_value' = None. \"\"\" max_best_value = 21", "scores. \"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\")", "starts and new hands are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's", "a string-like object. 
\"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if", ": int The number of ace cards to calculate possible summed values for.", "self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in", "*-*\" * face_down_count for value in hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\"", "\"\"\" def __init__(self, player_obj): \"\"\" Calls the __init__ method of the base Hand", "object that owns the input 'player_hand'. Where a payout is required, this player's", "that owns the input 'player_hand'. Where a payout is required, this player's balance", "(can't accept further cards). \"\"\" assert ( self.is_active() ), \"Cannot draw a card", "their own respective subclasses with specialised methods and attributes). Within a round of", "21; otherwise False. \"\"\" return self._natural def stand(self): \"\"\"Updates hand status to inactive:", "chooses to draw no more cards in the current round.\"\"\" self._active = False", "\"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val for val", "will be updated accordingly. \"\"\" assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot", "all cards currently within the hand. Parameters ---------- alt_text : str This optional", "11). Returns ------- ace_sum_possibilities : list of int A list containing each value", "to the face-up values if face_down_count > 0: hand_value_list = [ str(value) +", "over and they collect the bet of any player who did not also", "Method detects naturals and settles any bets as necessary; returns True if round", "import time draw_delay = 1 # The pause in seconds between drawn card", "against this hand: initially zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\"", "still be added to the hand. 
Once a player decides to 'stand' at", "is lost when a new round starts and new hands are initialised. Parameters", "( self.is_active() or self.is_bust() or (self.best_hand_value() == twenty_one and alt_text is not None)", "collection of cards associated with either the dealer or a player (each having", "= 1 # The pause in seconds between drawn card actions twenty_one =", "player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): # No action, round ends and bet", "hands are special because bets can be made against these hands. \"\"\" def", "self._bust = False # The bust status communicates whether the hand is bust", "card (face-up by default) calls its 'flip_card' method to ensure the card is", "the settling of naturals; otherwise False (and the round continues). \"\"\" if not", "cards as a sorted list. Parameters ---------- ace_count : int The number of", "A player's 'live' hand object. The value of this hand is read and", "natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values", "adds this card to the hand with orientation defined by 'face_dir'. Calls the", "settles any bets as necessary; returns True if round is concluded, otherwise False.", "\"\"\" Entering the reference for a hand object in the terminal triggers this", "methods and attributes). Within a round of blackjack, cards are added to a", "True if round is concluded, otherwise False. A hand is a 'natural' if", "is returned to the player's balance but they receive no winnings. If the", "are possible in the current round, following the settling of naturals; otherwise False", "object. The 'natural' status of this hand is read and compared to the", "for. 
ace_values : tuple A two-element tuple containing the possible card values an", "not self.is_natural() and player_hand.is_natural(): # Player wins 1.5x their original bet; multiplier is", "player_hand, player_obj): \"\"\" Method settles any bets at the end of the round;", "set used to define the '_holder_name' attribute on the base class. This name", "this hand is read and compared to the value of the dealer's hand.", "the dealer's and player's hands are compared. If the player wins, their player", "card face-down. Raises ------ AssertionError Raised when the hand is inactive (can't accept", "checks whether the hand is now bust and updates all hand statuses accordingly.", "current round self._natural = False # The natural status communicates whether the hand", "number of aces can combine to make. TODO: Refactor to allow any number", "dealer did not, they are immediately paid 1.5x the value of their bet.", "\"s\" if alt_text is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s", "value exceeds seventeen. Method initially checks the dealer's hand value: if its best", "loses, the method exits and their bet is lost. The value of the", "the relative values of the player's and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"):", "winnings. If the player loses, the method exits and their balance is uneffected.", "object. 
In typical game flow, this bet amount has already been verified as", "face-down cards in the hand; counts face-up aces; sums face-up cards that aren't", "the players score is printed each time the dealer's hand is printed so", "= None face_down_count = 0 non_ace_sum = 0 # Loop: counts number of", "if player_hand.is_bust(): return if self.is_bust(): dealer_score = 0 else: dealer_score = self.best_hand_value() if", "single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or self.is_bust() or (self.best_hand_value()", "elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self): print(f\"Card", "'_validate_hand_status' that checks whether the hand is now bust and updates all hand", "and added to the dealer's hand object. face_dir : None / str Defines", "the value of their bet plus the original bet amount is returned. If", "player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand()", "over keys of dict?) \"\"\" ace_sum_possibilities = [0] for ace_idx in range(ace_count): first_set", "= holder_name def __iter__(self): \"\"\" Allows hand objects to be iterated over, yielding", "has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in the", "the hand is a natural (value = 21 with 2 cards) self._holder_name =", "settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals and settles any bets as necessary;", "\"\"\" Returns the best possible value of the hand as an integer. If", "'Dealer'. Defaults to 'Player' for this base hand class. 
\"\"\" self._live_hand = (", "1 # This if-else block defines a list of possible values associated with", "all possible values the hand's combination of cards can take with no duplicates.", "inactive (can't accept further cards). \"\"\" assert ( self.is_active() ), \"Cannot draw a", "when card contains two cards with combined value of 21; otherwise False. \"\"\"", "deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does the dealer!", "players class Hand: \"\"\" A class defining the properties and methods of a", "player loses, the method exits and their balance is uneffected. The bet placed", "value = 21: returns True; otherwise False). Returns ------- bool True when card", "has already been removed from the player's balance. \"\"\" self._bet += amount def", "in all_hand_values if val <= max_best_value]) except ValueError: best_value = None return best_value", "that the dealer has gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck The game's", "hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\")", "provided. Returns ------- empty_string : str An empty string, returned so that the", "natural status communicates whether the hand is a natural (value = 21 with", "and their bet is lost. The value of the dealer's and player's hands", "card in self._live_hand: yield card def __repr__(self): \"\"\" Entering the reference for a", "shorthand details of all cards currently within the hand. Parameters ---------- alt_text :", "first two cards at the start of a round. If the dealer gets", "given participant. Parameters ---------- holder_name : str Defines the owner, or 'holder', of", "lost. The value of the dealer's and player's hands are compared. 
If the", "cannot be settled between the dealer and a player unless both participants have", "> 21: returns True; otherwise False). Returns ------- bool True when lowest possible", "if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir)", "value of the dealer's hand. Where a payout is required, the amount bet", "an empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\"", "return hand_value_list def best_hand_value(self): \"\"\" Returns the best possible value of the hand", "the 'print_hand' method can be called by the Hand class' __repr__ method which", "they are immediately paid 1.5x the value of their bet. Parameters ---------- player_hand", "the method is called with face_dir specified, it behaves identically to the equivalent", "hand is still active in the current round self._bust = False # The", "is bust, has value equal to 21 or is a natural. Updates hand", "card object and deletes this card from the deck. If the 'face_dir' input", "it is appended to the hand array. Finally, the method calls '_validate_hand_status' that", "False). Returns ------- bool True when lowest possible hand value exceeds 21; otherwise", "current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def", "Calls the __init__ method of the base Hand class, initialising an empty hand", "when player chooses to draw no more cards in the current round.\"\"\" self._active", "\"\"\" This method automatically resolves the dealer's hand: drawing cards until the hand", "ace values (additional loop over keys of dict?) 
\"\"\" ace_sum_possibilities = [0] for", "payed the value of their bet plus the original bet amount is returned.", "the hand; counts face-up aces; sums face-up cards that aren't an ace for", "encounters a face-down card try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down)", "contains two cards with a total value of 21. Players and dealers can", "# A list of card objects making up the hand; initialised as an", "hand face-up and prints hand details to the screen.\"\"\" print(\"\\n---------------\") for card in", "AssertionError Raised when the hand is inactive (can't accept further cards). \"\"\" assert", "payout is required, the amount bet against the hand is also read into", "ace for card in self: # Try statement catches AssertionErrors thrown when 'is_ace'", "boolean, returns 'bust' status of hand in the current round (value > 21:", "value 'ace_count' number of aces can combine to make. TODO: Refactor to allow", "player's hand self._reveal_hand() print(\"Dealer has a natural!\") elif not self.is_natural() and player_hand.is_natural(): #", "bet amount is returned. If it's a draw, the bet is returned to", "Returns the total value(s) of the target hand by summing the values of", "if ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility +", "Prints the hand's owner followed by shorthand details of all cards currently within", "back into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\") payout_multiplier = 2.5", "while cards can still be added to the hand. Once a player decides", "be added to the hand. Once a player decides to 'stand' at their", "the method exits and their bet is lost. The value of the dealer's", "removed from this deck and added to the dealer's hand object. 
face_dir :", "freshly drawn card (face-up by default) calls its 'flip_card' method to ensure the", "of int / str A list containing all possible values the hand's combination", "is_bust(self): \"\"\" As a boolean, returns 'bust' status of hand in the current", "status accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is None: self._bust = True self.stand()", "still active in the current round self._bust = False # The bust status", "hand status accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is None: self._bust = True", "False. A hand is a 'natural' if it contains two cards with a", "and added to the current hand object. face_dir : str Defines whether card", "possibility + non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list", "and settles any bets as necessary; returns True if round is concluded, otherwise", "else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This", "to a hand object held by the dealer. The dealer's hand is unique", "'live' hand object. Allows the player's hand to be printed for comparison as", "is resolved. player_score_message : str A string that communicates the players score. As", "been removed from the player's balance. \"\"\" self._bet += amount def get_bet(self): \"\"\"Returns", "card : blackjack.card.Card The next card in the hand (within the hand object's", "target hand by summing the values of all constituent card objects. Parameters ----------", "identically to the equivalent method on the base Hand class. \"\"\" if face_dir:", "already been verified as positive and has already been removed from the player's", "exceeds 21; otherwise False. 
\"\"\" return self._bust def is_natural(self): \"\"\" As a boolean,", "in seconds between drawn card actions twenty_one = 21 # Ideal score value", "non_ace_sum = 0 # Loop: counts number of face-down cards in the hand;", "A hand is a 'natural' if it contains two cards with a total", "bet is returned to the player's balance but they receive no winnings. If", "object. player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. Allows the player's hand", "# Try statement catches AssertionErrors thrown when 'is_ace' method encounters a face-down card", "deck. If the 'face_dir' input argument requires the hand to be dealt face-down,", "the hand object's '_live_hand' attribute). \"\"\" for card in self._live_hand: yield card def", "orientation defined by 'face_dir'. Calls the 'deal_card' method of an input deck object,", "that checks whether the hand is now bust and updates all hand statuses", "round.\"\"\" self._active = False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card from", "True bet_amount = player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): # No action, round", "print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer", "True; otherwise False). Returns ------- bool True when lowest possible hand value exceeds", "player_obj): \"\"\" Calls the __init__ method of the base Hand class, initialising an", "this method, printing all hand details. Returns ------- Output of 'print_hand' method :", "if not any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete else: round_complete = True", "round_complete : bool Returns True if no further actions are possible in the", "status communicates whether the hand is a natural (value = 21 with 2", "correctly face-down before it it is appended to the hand array. 
Finally, the", "natural!\") elif not self.is_natural() and player_hand.is_natural(): # Player wins 1.5x their original bet;", "> player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier =", "Initialises an empty hand object for a given participant. Parameters ---------- holder_name :", "a collection of ace cards as a sorted list. Parameters ---------- ace_count :", "), \"Cannot draw a card to this hand: it is marked as inactive", "(> 21), the hands '_active' attribute is set to False signalling that no", "'_live_hand' attribute). \"\"\" for card in self._live_hand: yield card def __repr__(self): \"\"\" Entering", "further actions are required by the player holding the hand in the current", "so bet amount is also deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural", "= card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 # This", "can get naturals upon drawing their first two cards at the start of", "gone bust. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object -", "input deck and adds this card to the hand with orientation defined by", "actions are required by the player holding the hand in the current round.", "bet made by a player to the current hand object: at the end", "value of 21. Players and dealers can get naturals upon drawing their first", "The 'natural' status of this hand is read and compared to the status", "hand is a natural (value = 21 with 2 cards) self._holder_name = holder_name", "the hand contains face-down cards, this block adds the consistent face-down string to", "Returns ------- empty_string : str An empty string, returned so that the 'print_hand'", "exits and their balance is uneffected. The bet placed against their hand is", "created: either 'Player' or 'Dealer'. 
Defaults to 'Player' for this base hand class.", "bet_amount = player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): # No action, round ends", "initially zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a bet", "card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError:", "may be removed from this deck and added to the dealer's hand object.", ": str A string that communicates the players score. As the dealer's hand", "Tells method whether to include face-down cards in calculating the value(s) of the", "within the hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len() to be used", "subclasses, and related methods. \"\"\" import time draw_delay = 1 # The pause", "player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in the hand face-up and", "round (value > 21: returns True; otherwise False). Returns ------- bool True when", "a natural. If a player gets a natural and the dealer did not,", "a collection of cards associated with either the dealer or a player (each", "Finally, the method calls '_validate_hand_status' that checks whether the hand is now bust", "int or None The best possible total value of the hand's constituent cards.", "False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card from the input deck", "whether to include face-down cards in calculating the value(s) of the hand. Defaults", "The dealer's hand is unique because: the first card dealt to the dealer", "base Hand class. \"\"\" if face_dir: super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir", "str(value) + \" + *-*\" * face_down_count for value in hand_value_list ] return", "cards face-down: returns a list of strings. 
\"\"\" ace_count = 0 ace_values =", "the 'Hand' class, 'PlayerHand' and 'DealerHand' subclasses, and related methods. \"\"\" import time", "total value(s) of the target hand by summing the values of all constituent", "hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if", "otherwise face-up. If the method is called with face_dir specified, it behaves identically", "for possibility in ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list = [non_ace_sum] #", "automatically. \"\"\" def __init__(self): \"\"\"Calls the __init__ method of the base Hand class,", "self.is_bust(): dealer_score = 0 else: dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return", "By default, 'face_dir' is None when method is called against a dealer's hand", ": blackjack.deck.Deck The game's 'live' deck object - cards may be removed from", "elif self.best_hand_value() == twenty_one: self.stand() if len(self) == natural_length: self._natural = True @staticmethod", "self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the hand's owner followed by shorthand", "the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card from the", "\"\"\"Calls the __init__ method of the base Hand class, initialising an empty hand", "of the hand in the current round (bust/stand = False; otherwise = True).", "current round (value > 21: returns True; otherwise False). Returns ------- bool True", "and the dealer did not, they are immediately paid 1.5x the value of", "in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self,", "the base class. 
This name is then displayed when printing hand details to", "self._bet += amount def get_bet(self): \"\"\"Returns the amount bet against this player's hand", "self._bust = True self.stand() elif self.best_hand_value() == twenty_one: self.stand() if len(self) == natural_length:", "plus the original bet amount is returned. If it's a draw, the bet", "of a round. If the dealer gets a natural, the round is over", "ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand): \"\"\" A subclass", "(case-insensitive) will add the card face-down. Raises ------ AssertionError Raised when the hand", "= self._holder_name[-1].lower() == \"s\" if alt_text is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}'", "= player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a bet made by a", "self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards", "def is_active(self): \"\"\" As a boolean, returns the active status of the hand", "draws cards until its value exceeds 17 or goes bust. The dealer's final", "cards that aren't an ace for card in self: # Try statement catches", "a boolean, returns the active status of the hand in the current round", "round. Returns ------- bool True when hand can still receive cards in the", "if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all", "are compared. 
If the player wins, their player object is payed the value", "= 0 else: dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else: bet_amount", "unless both participants have 'stood' or gone bust.\" if player_hand.is_bust(): return if self.is_bust():", "If it's a draw, the bet is returned to the player's balance but", "Where a payout is required, this player's balance will be updated accordingly. Returns", "Refactor to allow any number of possible ace values (additional loop over keys", "if alt_text is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\")", "subclass defining the properties and methods specific to a hand object held by", "dealer stands. If < 17, the hand draws cards until its value exceeds", "the dealer resolves this bet. Parameters ---------- amount : float The amount bet", "method, printing all hand details. Returns ------- Output of 'print_hand' method : str", "A class defining the properties and methods of a hand object. A hand", "player_score_message : str A string that communicates the players score. As the dealer's", "defining the properties and methods specific to a hand object held by a", "empty_string = \"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text is not None:", "dealer's hand is unique because: the first card dealt to the dealer will", "dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one card from the input", "to draw no more cards in the current round.\"\"\" self._active = False def", "This name is then displayed when printing hand details to screen. \"\"\" self._bet", "attribute is set to False signalling that no further actions are required by", "to the dealer will always be dealt face-down; the dealer's turn in a", "the dealer's hand is resolved. player_score_message : str A string that communicates the", "resolved automatically. 
\"\"\" def __init__(self): \"\"\"Calls the __init__ method of the base Hand", "make. TODO: Refactor to allow any number of possible ace values (additional loop", "object. face_dir : str Defines whether card is added to the hand face-up", "the player's and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty", "lost when a new round starts and new hands are initialised. Parameters ----------", "initialised. The name of this player is queried and set used to define", "time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message)", "current round. Returns ------- bool True when hand can still receive cards in", "None when method is called against a dealer's hand object. Where None, the", "of the target hand by summing the values of all constituent card objects.", "triggered when player chooses to draw no more cards in the current round.\"\"\"", "relative values of the player's and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\"", "is called with face_dir specified, it behaves identically to the equivalent method on", "This method automatically resolves the dealer's hand: drawing cards until the hand value", "object. Where None, the orientation of the card is determined by the number", "be added face-up with face_dir = 'up'. Any value of face_dir not spelling", "a face-down card try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values = card.card_value(bypass_face_down) else:", "with orientation defined by 'face_dir'. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live'", "pause in seconds between drawn card actions twenty_one = 21 # Ideal score", "as an integer. If hand value is bust (> 21), returns None. 
Returns", "\"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text is not None: print(alt_text) elif", "to the hand with orientation defined by 'face_dir'. Parameters ---------- deck_obj : blackjack.deck.Deck", "printed so the user can easily compare the relative scores. \"\"\" dealer_target =", "method which must return a string-like object. \"\"\" empty_string = \"\" ends_with_s =", ": str An empty string, returned so that the 'print_hand' method can be", "it's a draw, the bet is returned to the player's balance but they", "that the 'print_hand' method can be called by the Hand class' __repr__ method", "2 cards) self._holder_name = holder_name def __iter__(self): \"\"\" Allows hand objects to be", "if its best value > 17, the dealer stands. If < 17, the", "Returns ------- hand_value_list : list of int / str A list containing all", "values the hand's combination of cards can take with no duplicates. For a", "the current hand object. face_dir : str Defines whether card is added to", "object held by the dealer. The dealer's hand is unique because: the first", "time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals and settles any bets", "it is marked as inactive in the current round.\" drawn_card = deck_obj.deal_card() if", "include face-down cards in calculating the value(s) of the hand. Defaults to False.", "added face-up with face_dir = 'up'. Any value of face_dir not spelling 'up'", "screen.\"\"\" print(\"\\n---------------\") for card in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\")", "---------- alt_text : str This optional argument will be printed instead of the", "player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method settles any", "into 'bet_amount'. 
player_obj : blackjack.player.Player The player object that owns the input 'player_hand'.", "'_active' attribute is set to False signalling that no further actions are required", "added to a hand when the associated player chooses to 'hit'. The outcome", "self._reveal_hand() while self.is_active(): if self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\")", "elif player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\"", "str An empty string, returned so that the 'print_hand' method can be called", "+ \" + *-*\" * face_down_count for value in hand_value_list ] return hand_value_list", "twenty_one = 21 # Ideal score value for both players class Hand: \"\"\"", "player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a bet made by", "'live' hand object. The 'natural' status of this hand is read and compared", "in hand and value = 21: returns True; otherwise False). Returns ------- bool", "Returns ------- round_complete : bool Returns True if no further actions are possible", "one card from the input deck and adds this card to the hand", "amount bet by a player against this hand: initially zero player_name = player_obj.get_name()", "the dealer currently has a single card in their hand, the card is", "the best possible value of the hand as an integer. If hand value", "with a total value of 21. Players and dealers can get naturals upon", "when printing hand details to screen. \"\"\" self._bet = float( 0 ) #", "blackjack.hand.PlayerHand A player's 'live' hand object. Allows the player's hand to be printed", "'natural' status of hand (2 cards in hand and value = 21: returns", ") # An attribute holding the amount bet by a player against this", "hand is inactive (can't accept further cards). 
\"\"\" assert ( self.is_active() ), \"Cannot", "specific to a hand object held by the dealer. The dealer's hand is", "the hand with orientation defined by 'face_dir'. Calls the 'deal_card' method of an", "the dealer will always be dealt face-down; the dealer's turn in a single", "active in the current round self._bust = False # The bust status communicates", "calculating the value(s) of the hand. Defaults to False. Returns ------- hand_value_list :", "printed to the screen or the player is informed that the dealer has", "a player to the current hand object: at the end of a round,", "bet is lost. The value of the dealer's and player's hands are compared.", "whether the hand is bust (value > 21) in the current round self._natural", "the card face-down. Raises ------ AssertionError Raised when the hand is inactive (can't", "hand's combination of cards can take with no duplicates. For a hand with", "the end of a round, the dealer resolves this bet. Parameters ---------- amount", "or face-down. By default, the card will be added face-up with face_dir =", "hands with any cards face-down: returns a list of strings. \"\"\" ace_count =", "to calculate possible summed values for. ace_values : tuple A two-element tuple containing", "has already been verified as positive and has already been removed from the", "hand is read and compared to the value of the dealer's hand. Where", "constituent card objects. Parameters ---------- bypass_face_down : bool Tells method whether to include", "__init__(self, player_obj): \"\"\" Calls the __init__ method of the base Hand class, initialising", "= False def draw_card(self, deck_obj, face_dir=\"up\"): \"\"\" Removes one card from the input", "player's hand to be printed for comparison as the dealer's hand is resolved.", "object in the terminal triggers this method, printing all hand details. 
Returns -------", "face_down_count for value in hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\" Returns the", "further cards). \"\"\" assert ( self.is_active() ), \"Cannot draw a card to this", "bet; multiplier is 2.5x so bet amount is also deposited back into balance", "bet. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The 'natural'", "hand object. The 'natural' status of this hand is read and compared to", "= \"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text is not None: print(alt_text)", "self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals", "removed from this deck and added to the dealer's hand object. player_hand :", "the hand (within the hand object's '_live_hand' attribute). \"\"\" for card in self._live_hand:", "If the player wins, their player object is payed the value of their", "== dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass", "keys of dict?) \"\"\" ace_sum_possibilities = [0] for ace_idx in range(ace_count): first_set =", "hand is also read into 'bet_amount'. player_obj : blackjack.player.Player The player object that", "from this deck and added to the dealer's hand object. face_dir : None", "combination of cards can take with no duplicates. For a hand with all", "\"\"\" return self._active def is_bust(self): \"\"\" As a boolean, returns 'bust' status of", "val <= max_best_value]) except ValueError: best_value = None return best_value def is_active(self): \"\"\"", "def is_natural(self): \"\"\" As a boolean, returns 'natural' status of hand (2 cards", "str A list containing all possible values the hand's combination of cards can", "the equivalent method on the base Hand class. 
\"\"\" if face_dir: super().draw_card(deck_obj, face_dir)", "and 'DealerHand' subclasses, and related methods. \"\"\" import time draw_delay = 1 #", "\"\"\" ace_sum_possibilities = [0] for ace_idx in range(ace_count): first_set = [ ace_values[0] +", "natural (dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())):", "method exits and their balance is uneffected. The bet placed against their hand", "is correctly face-down before it it is appended to the hand array. Finally,", "None: self._bust = True self.stand() elif self.best_hand_value() == twenty_one: self.stand() if len(self) ==", "not spelling 'up' (case-insensitive) will add the card face-down. Raises ------ AssertionError Raised", "of the hand owner's name if provided. Returns ------- empty_string : str An", "it it is appended to the hand array. Finally, the method calls '_validate_hand_status'", "hand\") for idx, single_card in enumerate(self): print(f\"Card {idx}: {single_card.short_card_details()}\") if ( self.is_active() or", "hand status to inactive: triggered when player chooses to draw no more cards", "the possible values of a collection of ace cards as a sorted list.", "string that communicates the players score. As the dealer's hand is resolved, the", "Calls the 'deal_card' method of an input deck object, the deck returns a", "balance will be updated accordingly. \"\"\" assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets", "face-down before it it is appended to the hand array. Finally, the method", "in range(ace_count): first_set = [ ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities ]", "def is_bust(self): \"\"\" As a boolean, returns 'bust' status of hand in the", "summed values for. ace_values : tuple A two-element tuple containing the possible card", "of naturals; otherwise False (and the round continues). 
\"\"\" if not any((self.is_natural(), player_hand.is_natural())):", "face-down. Raises ------ AssertionError Raised when the hand is inactive (can't accept further", "initialised as an empty list self._active = True # The active status communicates", "to the hand with orientation defined by 'face_dir'. Calls the 'deal_card' method of", "deck and added to the current hand object. face_dir : str Defines whether", "dealer's hand. If the dealer currently has a single card in their hand,", "def best_hand_value(self): \"\"\" Returns the best possible value of the hand as an", "+ *-*\" * face_down_count for value in hand_value_list ] return hand_value_list def best_hand_value(self):", ": float The amount bet against the hand object. In typical game flow,", "bypass_face_down=False): \"\"\" Returns the total value(s) of the target hand by summing the", "len() to be used on hand objects, returning the number of cards in", "is added to the hand face-up or face-down. By default, 'face_dir' is None", "def stand(self): \"\"\"Updates hand status to inactive: triggered when player chooses to draw", "Hand class' __repr__ method which must return a string-like object. \"\"\" empty_string =", "card in the hand (within the hand object's '_live_hand' attribute). \"\"\" for card", "total value of the hand's constituent cards. If no hand value <= 21,", "until the hand value exceeds seventeen. Method initially checks the dealer's hand value:", "Adds a bet made by a player to the current hand object: at", "two-element tuple containing the possible card values an ace can take e.g. 
(1,", "= float( 0 ) # An attribute holding the amount bet by a", "round self._bust = False # The bust status communicates whether the hand is", "/ str Defines whether card is added to the hand face-up or face-down.", "reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def settle_naturals(self, player_hand, player_obj): \"\"\" Method detects naturals and", "print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break", "'PlayerHand' and 'DealerHand' subclasses, and related methods. \"\"\" import time draw_delay = 1", "!= \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the hand's owner", "to the hand array. Finally, the method calls '_validate_hand_status' that checks whether the", "Where None, the orientation of the card is determined by the number of", "------ card : blackjack.card.Card The next card in the hand (within the hand", "for the player. Parameters ---------- player_obj : blackjack.player.Player The player object that owns", "round_complete = True bet_amount = player_hand.get_bet() if self.is_natural() and not player_hand.is_natural(): # No", "with orientation defined by 'face_dir'. Calls the 'deal_card' method of an input deck", "cards). \"\"\" assert ( self.is_active() ), \"Cannot draw a card to this hand:", "is a natural. Updates hand status accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is", "+ non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list =", "\"\"\"Turns all cards in the hand face-up and prints hand details to the", "loop over keys of dict?) \"\"\" ace_sum_possibilities = [0] for ace_idx in range(ace_count):", "None, the orientation of the card is determined by the number of cards", "accordingly. 
\"\"\" assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled between", "of cards can take with no duplicates. For a hand with all cards", "be iterated over, yielding constituent card objects in the order they were added.", "initialising an empty hand object for the player. Parameters ---------- player_obj : blackjack.player.Player", "dealer's hand object. Where None, the orientation of the card is determined by", "does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off", "# An attribute holding the amount bet by a player against this hand:", "containing each value 'ace_count' number of aces can combine to make. TODO: Refactor", "the player. Parameters ---------- player_obj : blackjack.player.Player The player object that owns the", "be resolved automatically. \"\"\" def __init__(self): \"\"\"Calls the __init__ method of the base", "card is added to the hand face-up or face-down. By default, 'face_dir' is", "max_best_value]) except ValueError: best_value = None return best_value def is_active(self): \"\"\" As a", "the hand being initialised. The name of this player is queried and set", "not player_hand.is_natural(): # No action, round ends and bet is collected (discarded) automatically", "player_hand.is_natural(): # Player wins 1.5x their original bet; multiplier is 2.5x so bet", "block adds the consistent face-down string to the face-up values if face_down_count >", "calls '_validate_hand_status' that checks whether the hand is now bust and updates all", "the terminal triggers this method, printing all hand details. 
Returns ------- Output of", "super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message):", "player_hand.best_hand_value() == dealer_score: payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A", "dealers can get naturals upon drawing their first two cards at the start", "cards in calculating the value(s) of the hand. Defaults to False. Returns -------", "the hand is still active in the current round self._bust = False #", "[ possibility + non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list = ace_sum else:", "the bet is returned to the player's balance but they receive no winnings.", "] hand_value_list = ace_sum else: hand_value_list = [non_ace_sum] # Where the hand contains", "balance but they receive no winnings. If the player loses, the method exits", "hand score is printed to the screen or the player is informed that", "if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the", "when method is called against a dealer's hand object. Where None, the orientation", "player's balance will be updated accordingly. \"\"\" assert not any( (self.is_active(), player_hand.is_active()) ),", "to be used on hand objects, returning the number of cards in the", "will add the card face-down. Raises ------ AssertionError Raised when the hand is", "contains two cards with combined value of 21; otherwise False. \"\"\" return self._natural", "the hand. Parameters ---------- alt_text : str This optional argument will be printed", "returned to the player's balance but they receive no winnings. If the player", "The active status communicates whether the hand is still active in the current", "adds this card to the hand with orientation defined by 'face_dir'. 
Parameters ----------", "\"\"\" Returns the possible values of a collection of ace cards as a", "a hand when the associated player chooses to 'hit'. The outcome of each", "and a player unless both participants have 'stood' or gone bust.\" if player_hand.is_bust():", "\"\"\" ace_count = 0 ace_values = None face_down_count = 0 non_ace_sum = 0", "details to screen. \"\"\" self._bet = float( 0 ) # An attribute holding", "the reference for a hand object in the terminal triggers this method, printing", "cards in the hand face-up and prints hand details to the screen.\"\"\" print(\"\\n---------------\")", "Entering the reference for a hand object in the terminal triggers this method,", "constituent card objects in the order they were added. Yields ------ card :", "\"\"\" Adds a bet made by a player to the current hand object:", "all constituent card objects. Parameters ---------- bypass_face_down : bool Tells method whether to", "cards with a total value of 21. Players and dealers can get naturals", "draw, the bet is returned to the player's balance but they receive no", "has a natural (dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif", "the current round. Returns ------- bool True when hand can still receive cards", "@staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values of a collection of", "added to the current hand object. face_dir : str Defines whether card is", "Parameters ---------- player_obj : blackjack.player.Player The player object that owns the hand being", "to the equivalent method on the base Hand class. \"\"\" if face_dir: super().draw_card(deck_obj,", "settles any bets at the end of the round; where the player loses,", "add the card face-down. 
Raises ------ AssertionError Raised when the hand is inactive", "deck returns a single card object and deletes this card from the deck.", "str Defines the owner, or 'holder', of the hand object bseing created: either", "single card object and deletes this card from the deck. If the 'face_dir'", "False. \"\"\" return self._natural def stand(self): \"\"\"Updates hand status to inactive: triggered when", "is required, this player's balance will be updated accordingly. \"\"\" assert not any(", "self.best_hand_value() == twenty_one: self.stand() if len(self) == natural_length: self._natural = True @staticmethod def", "communicates whether the hand is bust (value > 21) in the current round", "print(player_score_message) if player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if", "empty hand object for the player. Parameters ---------- player_obj : blackjack.player.Player The player", "object. Allows the player's hand to be printed for comparison as the dealer's", "assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled between the dealer", "ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class", "alt_text is not None) ): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether", "this deck and added to the dealer's hand object. face_dir : None /", "super().draw_card(deck_obj, face_dir) elif len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir", "get naturals upon drawing their first two cards at the start of a", "The bust status communicates whether the hand is bust (value > 21) in", "number of face-down cards in the hand; counts face-up aces; sums face-up cards", "determined by the relative values of the player's and dealer's hands. 
\"\"\" def", "of the player's and dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an", "payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player", "hand: initially zero player_name = player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a", "to the player's balance but they receive no winnings. If the player loses,", "owns the hand being initialised. The name of this player is queried and", "a round while cards can still be added to the hand. Once a", "counts number of face-down cards in the hand; counts face-up aces; sums face-up", "= None return best_value def is_active(self): \"\"\" As a boolean, returns the active", "current round; otherwise False. \"\"\" return self._active def is_bust(self): \"\"\" As a boolean,", ": list of int A list containing each value 'ace_count' number of aces", "bool True when card contains two cards with combined value of 21; otherwise", "player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. Allows the player's hand to", "the deck returns a single card object and deletes this card from the", "bool True when lowest possible hand value exceeds 21; otherwise False. \"\"\" return", "\"\"\"Checks whether the hand is bust, has value equal to 21 or is", "[ ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set = [ ace_values[1]", "to the status of the dealer's hand. Where a payout is required, the", "a player (each having their own respective subclasses with specialised methods and attributes).", "where the player loses, the method exits and their bet is lost. 
The", "also deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\") payout_multiplier", "a player unless both participants have 'stood' or gone bust.\" if player_hand.is_bust(): return", "== 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir)", "Yields ------ card : blackjack.card.Card The next card in the hand (within the", "= 2 if self.best_hand_value() is None: self._bust = True self.stand() elif self.best_hand_value() ==", "having their own respective subclasses with specialised methods and attributes). Within a round", "None / str Defines whether card is added to the hand face-up or", "def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method automatically resolves the dealer's hand:", "payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and dealer: player's bet is", "player_score_message): \"\"\" This method automatically resolves the dealer's hand: drawing cards until the", "= self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility + non_ace_sum for possibility in ace_sum_possibilities", "this card to the hand with orientation defined by 'face_dir'. Calls the 'deal_card'", "to the hand face-up or face-down. By default, 'face_dir' is None when method", "ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return", "and they collect the bet of any player who did not also get", "new hands are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand", "hand object for the player. Parameters ---------- player_obj : blackjack.player.Player The player object", "to define the '_holder_name' attribute on the base class. This name is then", "hand value: if its best value > 17, the dealer stands. If <", "the dealer stands. 
If < 17, the hand draws cards until its value", "will be added face-up with face_dir = 'up'. Any value of face_dir not", "natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount", "print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has gone bust!\")", "card to the hand with orientation defined by 'face_dir'. Parameters ---------- deck_obj :", "the current round (value > 21: returns True; otherwise False). Returns ------- bool", "Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object - cards may", "except AssertionError: face_down_count += 1 # This if-else block defines a list of", "player_hand.best_hand_value() == twenty_one: print(\"You've got 21!\") time.sleep(draw_delay) self._reveal_hand() while self.is_active(): if self.best_hand_value() <", "list of int A list containing each value 'ace_count' number of aces can", "If < 17, the hand draws cards until its value exceeds 17 or", "empty list self._active = True # The active status communicates whether the hand", "of 21; otherwise False. \"\"\" return self._natural def stand(self): \"\"\"Updates hand status to", "[] ) # A list of card objects making up the hand; initialised", "the first card dealt to the dealer will always be dealt face-down; the", "drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None):", "the dealer's hand is printed so the user can easily compare the relative", "self._holder_name = holder_name def __iter__(self): \"\"\" Allows hand objects to be iterated over,", "lowest possible hand value exceeds 21; otherwise False. \"\"\" return self._bust def is_natural(self):", "compared to the status of the dealer's hand. 
Where a payout is required,", "a natural!\") elif not self.is_natural() and player_hand.is_natural(): # Player wins 1.5x their original", "any bets as necessary; returns True if round is concluded, otherwise False. A", "be removed from this deck and added to the current hand object. face_dir", "within the hand. Parameters ---------- alt_text : str This optional argument will be", "amount bet against the hand object. In typical game flow, this bet amount", "round while cards can still be added to the hand. Once a player", "an ace can take e.g. (1, 11). Returns ------- ace_sum_possibilities : list of", "player's balance but they receive no winnings. If the player loses, the method", "with 2 cards) self._holder_name = holder_name def __iter__(self): \"\"\" Allows hand objects to", "and dealer: player's bet is deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural!\")", "cards currently in the dealer's hand. If the dealer currently has a single", "the associated player chooses to 'hit'. The outcome of each round is determined", "is determined by the relative values of the player's and dealer's hands. \"\"\"", "their bet is lost. The value of the dealer's and player's hands are", "containing all possible values the hand's combination of cards can take with no", "21. Players and dealers can get naturals upon drawing their first two cards", "defined by 'face_dir'. Calls the 'deal_card' method of an input deck object, the", "be dealt face-down; the dealer's turn in a single round must be resolved", "when the hand is inactive (can't accept further cards). \"\"\" assert ( self.is_active()", "= ace_sum else: hand_value_list = [non_ace_sum] # Where the hand contains face-down cards,", "communicates whether the hand is a natural (value = 21 with 2 cards)", "not None) ): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand", "and attributes). 
Within a round of blackjack, cards are added to a hand", "player's 'live' hand object. Allows the player's hand to be printed for comparison", "cards are added to a hand when the associated player chooses to 'hit'.", "False return round_complete else: round_complete = True bet_amount = player_hand.get_bet() if self.is_natural() and", "card in self: # Try statement catches AssertionErrors thrown when 'is_ace' method encounters", "21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val for val in all_hand_values if", "1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining the properties and", "return round_complete else: round_complete = True bet_amount = player_hand.get_bet() if self.is_natural() and not", "printing all hand details. Returns ------- Output of 'print_hand' method : str Prints", "21), the hands '_active' attribute is set to False signalling that no further", "(additional loop over keys of dict?) \"\"\" ace_sum_possibilities = [0] for ace_idx in", "Try statement catches AssertionErrors thrown when 'is_ace' method encounters a face-down card try:", "the player's balance. \"\"\" self._bet += amount def get_bet(self): \"\"\"Returns the amount bet", "player object that owns the hand being initialised. The name of this player", "can be called by the Hand class' __repr__ method which must return a", "the properties and methods specific to a hand object held by the dealer.", "value(s) of the target hand by summing the values of all constituent card", "payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining the", "# Stand-off between player and dealer: player's bet is deposited back into balance", "elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and dealer: player's bet is deposited", "comparison as the dealer's hand is resolved. 
player_score_message : str A string that", "): print(f\"Value: {self.hand_value()}\") return empty_string def _verify_hand_status(self): \"\"\"Checks whether the hand is bust,", "* payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and dealer: player's bet", "A list containing each value 'ace_count' number of aces can combine to make.", "is determined by the number of cards currently in the dealer's hand. If", "round (bust/stand = False; otherwise = True). A hand is regarded as active", ": str Prints the hand's owner followed by shorthand details of all cards", "(self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled between the dealer and a player", "ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for idx, single_card in enumerate(self): print(f\"Card {idx}:", "def add_bet(self, amount): \"\"\" Adds a bet made by a player to the", "dict?) \"\"\" ace_sum_possibilities = [0] for ace_idx in range(ace_count): first_set = [ ace_values[0]", "values of a collection of ace cards as a sorted list. Parameters ----------", "required by the player holding the hand in the current round. Returns -------", "current hand object. face_dir : str Defines whether card is added to the", "deck_obj, face_dir=None): \"\"\" Removes one card from the input deck and adds this", "player is informed that the dealer has gone bust. Parameters ---------- deck_obj :", "required, this player's balance will be updated accordingly. \"\"\" assert not any( (self.is_active(),", "against the hand object. In typical game flow, this bet amount has already", "\"down\" super().draw_card(deck_obj, face_dir) else: face_dir = \"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand,", "the player loses, the method exits and their balance is uneffected. 
The bet", "player_hand.best_hand_value(): return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2", "for card in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay)", "the values of all constituent card objects. Parameters ---------- bypass_face_down : bool Tells", "(value = 21 with 2 cards) self._holder_name = holder_name def __iter__(self): \"\"\" Allows", "stand(self): \"\"\"Updates hand status to inactive: triggered when player chooses to draw no", "Returns ------- Output of 'print_hand' method : str Prints the hand's owner followed", "module exports the 'Hand' class, 'PlayerHand' and 'DealerHand' subclasses, and related methods. \"\"\"", "no hand value <= 21, 'best_value' = None. \"\"\" max_best_value = 21 all_hand_values", "dealer_score = 0 else: dealer_score = self.best_hand_value() if dealer_score > player_hand.best_hand_value(): return else:", "a hand object held by the dealer. The dealer's hand is unique because:", "print(f\"\\n{player_obj.get_name()} has a natural (dealer does not)!\") payout_multiplier = 2.5 player_obj.update_balance(bet_amount * payout_multiplier)", "True; otherwise False). Returns ------- bool True when card contains two cards with", "yielding constituent card objects in the order they were added. Yields ------ card", "be printed for comparison as the dealer's hand is resolved. player_score_message : str", "to include face-down cards in calculating the value(s) of the hand. Defaults to", "the dealer's hand: drawing cards until the hand value exceeds seventeen. Method initially", "a player gets a natural and the dealer did not, they are immediately", "else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 # This if-else block", "of the dealer's hand. 
Where a payout is required, the amount bet against", "self.best_hand_value() < dealer_target: self.draw_card(deck_obj) self.print_hand(alt_text=\"\\nDealer hits:\") player_hand.print_hand() print(player_score_message) print(\"\\n---\") time.sleep(draw_delay) else: self.stand() self.print_hand(alt_text=\"\\nDealer", "details of all cards currently within the hand. \"\"\" return self.print_hand() def __len__(self):", "the base Hand class, initialising an empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\")", "statuses accordingly. Parameters ---------- deck_obj : blackjack.deck.Deck The game's 'live' deck object -", "be updated accordingly. \"\"\" assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be", "== twenty_one: self.stand() if len(self) == natural_length: self._natural = True @staticmethod def _calculate_ace_values(ace_count,", "this player's balance will be updated accordingly. \"\"\" assert not any( (self.is_active(), player_hand.is_active())", "all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and dealer: player's bet is deposited back", "range(ace_count): first_set = [ ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set", "draw_delay = 1 # The pause in seconds between drawn card actions twenty_one", "(discarded) automatically with player's hand self._reveal_hand() print(\"Dealer has a natural!\") elif not self.is_natural()", "player is queried and set used to define the '_holder_name' attribute on the", "The game's 'live' deck object - cards may be removed from this deck", "1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method settles", "calls its 'flip_card' method to ensure the card is correctly face-down before it", "as an empty list self._active = True # The active status communicates whether", "face-up: returns a list of integers. 
For hands with any cards face-down: returns", "or 'Dealer'. Defaults to 'Player' for this base hand class. \"\"\" self._live_hand =", "objects. Parameters ---------- bypass_face_down : bool Tells method whether to include face-down cards", "21: returns True; otherwise False). Returns ------- bool True when card contains two", "Raises ------ AssertionError Raised when the hand is inactive (can't accept further cards).", "player object is payed the value of their bet plus the original bet", "owns the input 'player_hand'. Where a payout is required, this player's balance will", "\"\"\" def __init__(self): \"\"\"Calls the __init__ method of the base Hand class, initialising", "face_dir) elif len(self) == 1: face_dir = \"down\" super().draw_card(deck_obj, face_dir) else: face_dir =", "is bust (> 21), returns None. Returns ------- best_value : int or None", "their first two cards at the start of a round. If the dealer", "and has already been removed from the player's balance. \"\"\" self._bet += amount", "when lowest possible hand value exceeds 21; otherwise False. \"\"\" return self._bust def", "the dealer gets a natural, the round is over and they collect the", "'Player' or 'Dealer'. Defaults to 'Player' for this base hand class. \"\"\" self._live_hand", "(within the hand object's '_live_hand' attribute). \"\"\" for card in self._live_hand: yield card", "method calls '_validate_hand_status' that checks whether the hand is now bust and updates", "\"up\" super().draw_card(deck_obj, face_dir) def resolve_hand(self, deck_obj, player_hand, player_score_message): \"\"\" This method automatically resolves", "no duplicates. For a hand with all cards face-up: returns a list of", "added to the hand face-up or face-down. By default, the card will be", "updated accordingly. \"\"\" assert not any( (self.is_active(), player_hand.is_active()) ), \"Bets cannot be settled", "get a natural. If a player gets a natural and the dealer did", "immediately paid 1.5x the value of their bet. 
Parameters ---------- player_hand : blackjack.hand.PlayerHand", "bust (> 21), returns None. Returns ------- best_value : int or None The", "is a natural (value = 21 with 2 cards) self._holder_name = holder_name def", "are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live' hand object. The", "in the current round (value > 21: returns True; otherwise False). Returns -------", "tuple A two-element tuple containing the possible card values an ace can take", "easily compare the relative scores. \"\"\" dealer_target = 17 print(player_score_message) if player_hand.best_hand_value() ==", "card in self: if not card.is_face_up(): card.flip_card() self.print_hand(alt_text=\"Dealer reveals hand:\") print(\"---------------\") time.sleep(draw_delay) def", "This optional argument will be printed instead of the hand owner's name if", "if val <= max_best_value]) except ValueError: best_value = None return best_value def is_active(self):", ": bool Returns True if no further actions are possible in the current", "current round (bust/stand = False; otherwise = True). A hand is regarded as", "Defines the owner, or 'holder', of the hand object bseing created: either 'Player'", "of any player who did not also get a natural. If a player", "stands. If < 17, the hand draws cards until its value exceeds 17", "amount : float The amount bet against the hand object. In typical game", "e.g. (1, 11). Returns ------- ace_sum_possibilities : list of int A list containing", "Returns the best possible value of the hand as an integer. If hand", "instead of the hand owner's name if provided. Returns ------- empty_string : str", "in the hand face-up and prints hand details to the screen.\"\"\" print(\"\\n---------------\") for", "deck_obj, player_hand, player_score_message): \"\"\" This method automatically resolves the dealer's hand: drawing cards", "player (each having their own respective subclasses with specialised methods and attributes). 
Within", "that owns the hand being initialised. The name of this player is queried", "(face-up by default) calls its 'flip_card' method to ensure the card is correctly", "and dealers can get naturals upon drawing their first two cards at the", "face-down cards, this block adds the consistent face-down string to the face-up values", "regarded as active in a round while cards can still be added to", "the dealer's hand. If the dealer currently has a single card in their", "self.hand_value(bypass_face_down=True) try: best_value = max([val for val in all_hand_values if val <= max_best_value])", "is then displayed when printing hand details to screen. \"\"\" self._bet = float(", "assert ( self.is_active() ), \"Cannot draw a card to this hand: it is", "round. If the dealer gets a natural, the round is over and they", "'DealerHand' subclasses, and related methods. \"\"\" import time draw_delay = 1 # The", "for both players class Hand: \"\"\" A class defining the properties and methods", "list of integers. For hands with any cards face-down: returns a list of", "orientation of the card is determined by the number of cards currently in", "ace_values[0] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] second_set = [ ace_values[1] +", "= 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method", "\"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete else: round_complete =", "method encounters a face-down card try: if card.is_ace(bypass_face_down): ace_count += 1 ace_values =", "hand object. The value of this hand is read and compared to the", "dealer's final hand score is printed to the screen or the player is", "input deck object, the deck returns a single card object and deletes this", "None. 
\"\"\" max_best_value = 21 all_hand_values = self.hand_value(bypass_face_down=True) try: best_value = max([val for", "the __init__ method of the base Hand class, initialising an empty hand object", "behaves identically to the equivalent method on the base Hand class. \"\"\" if", "objects in the order they were added. Yields ------ card : blackjack.card.Card The", "\"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the hand's owner followed", "in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set)) ace_sum_possibilities.sort() return ace_sum_possibilities class DealerHand(Hand):", "any cards face-down: returns a list of strings. \"\"\" ace_count = 0 ace_values", "is regarded as active in a round while cards can still be added", "else: self.stand() self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust():", "self.stand() elif self.best_hand_value() == twenty_one: self.stand() if len(self) == natural_length: self._natural = True", "respective subclasses with specialised methods and attributes). Within a round of blackjack, cards", "hand. \"\"\" return self.print_hand() def __len__(self): \"\"\"Allows len() to be used on hand", "\"\"\" Removes one card from the input deck and adds this card to", "the current round; otherwise False. \"\"\" return self._active def is_bust(self): \"\"\" As a", "the input 'player_hand'. Where a payout is required, this player's balance will be", "has value equal to 21 or is a natural. Updates hand status accordingly.\"\"\"", "values if face_down_count > 0: hand_value_list = [ str(value) + \" + *-*\"", "hand draws cards until its value exceeds 17 or goes bust. 
The dealer's", "hand is bust (value > 21) in the current round self._natural = False", "of possible ace values (additional loop over keys of dict?) \"\"\" ace_sum_possibilities =", "Defines whether card is added to the hand face-up or face-down. By default,", "to be iterated over, yielding constituent card objects in the order they were", "all_hand_values if val <= max_best_value]) except ValueError: best_value = None return best_value def", "made against these hands. \"\"\" def __init__(self, player_obj): \"\"\" Calls the __init__ method", "ace_sum_possibilities ] hand_value_list = ace_sum else: hand_value_list = [non_ace_sum] # Where the hand", "dealer! It's a stand-off!\") payout_multiplier = 1 player_obj.update_balance(bet_amount * payout_multiplier) return round_complete def", "their hand, the card is dealt face-down; otherwise face-up. If the method is", "best possible value of the hand as an integer. If hand value is", "is read and compared to the status of the dealer's hand. Where a", "by shorthand details of all cards currently within the hand. Parameters ---------- alt_text", "when a new round starts and new hands are initialised. Parameters ---------- player_hand", "of face-down cards in the hand; counts face-up aces; sums face-up cards that", "print(\"\\n---\") def _reveal_hand(self): \"\"\"Turns all cards in the hand face-up and prints hand", "of their bet plus the original bet amount is returned. If it's a", "also get a natural. If a player gets a natural and the dealer", "Returns ------- ace_sum_possibilities : list of int A list containing each value 'ace_count'", "not, they are immediately paid 1.5x the value of their bet. Parameters ----------", "return self._active def is_bust(self): \"\"\" As a boolean, returns 'bust' status of hand", "can still receive cards in the current round; otherwise False. \"\"\" return self._active", "boolean, returns the active status of the hand in the current round (bust/stand", "who did not also get a natural. 
If a player gets a natural", "blackjack.hand.PlayerHand A player's 'live' hand object. The 'natural' status of this hand is", "bool True when hand can still receive cards in the current round; otherwise", "try: best_value = max([val for val in all_hand_values if val <= max_best_value]) except", "player object that owns the input 'player_hand'. Where a payout is required, this", "name of this player is queried and set used to define the '_holder_name'", "by summing the values of all constituent card objects. Parameters ---------- bypass_face_down :", "that no further actions are required by the player holding the hand in", "face-down; the dealer's turn in a single round must be resolved automatically. \"\"\"", "original bet; multiplier is 2.5x so bet amount is also deposited back into", "= [ str(value) + \" + *-*\" * face_down_count for value in hand_value_list", "If the player loses, the method exits and their balance is uneffected. The", "As a boolean, returns the active status of the hand in the current", "and deletes this card from the deck. If the 'face_dir' input argument requires", "by 'face_dir'. Calls the 'deal_card' method of an input deck object, the deck", "alt_text=None): \"\"\" Prints the hand's owner followed by shorthand details of all cards", "the target hand by summing the values of all constituent card objects. Parameters", "allow any number of possible ace values (additional loop over keys of dict?)", "to screen. \"\"\" self._bet = float( 0 ) # An attribute holding the", "[0] for ace_idx in range(ace_count): first_set = [ ace_values[0] + ace_sum_element for ace_sum_element", "self.is_natural() and player_hand.is_natural(): # Player wins 1.5x their original bet; multiplier is 2.5x", "between player and dealer: player's bet is deposited back into balance print(f\"\\n{player_obj.get_name()} has", "method exits and their bet is lost. The value of the dealer's and", "read and compared to the value of the dealer's hand. 
Where a payout", "at the end of a round, the dealer resolves this bet. Parameters ----------", "an integer. If hand value is bust (> 21), returns None. Returns -------", "= 'up'. Any value of face_dir not spelling 'up' (case-insensitive) will add the", "added to the dealer's hand object. face_dir : None / str Defines whether", "base Hand class, initialising an empty hand object for the player. Parameters ----------", "decides to 'stand' at their hand's current value, or if they go bust", "the value of the dealer's hand. Where a payout is required, the amount", "this bet amount has already been verified as positive and has already been", "be used on hand objects, returning the number of cards in the hand", "the face-up values if face_down_count > 0: hand_value_list = [ str(value) + \"", "dealt face-down, the freshly drawn card (face-up by default) calls its 'flip_card' method", "next card in the hand (within the hand object's '_live_hand' attribute). \"\"\" for", "drawing cards until the hand value exceeds seventeen. Method initially checks the dealer's", "did not, they are immediately paid 1.5x the value of their bet. Parameters", "------- Output of 'print_hand' method : str Prints the hand's owner followed by", "a new round starts and new hands are initialised. Parameters ---------- player_hand :", "dealer's hand: drawing cards until the hand value exceeds seventeen. Method initially checks", "signalling that no further actions are required by the player holding the hand", "the hand as the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns", "# No action, round ends and bet is collected (discarded) automatically with player's", "If the dealer gets a natural, the round is over and they collect", "player's 'live' hand object. The value of this hand is read and compared", ": list of int / str A list containing all possible values the", "an empty hand object for the player. 
Parameters ---------- player_obj : blackjack.player.Player The", "1.5x the value of their bet. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's", "multiplier is 2.5x so bet amount is also deposited back into balance print(f\"\\n{player_obj.get_name()}", "collect the bet of any player who did not also get a natural.", "not any((self.is_natural(), player_hand.is_natural())): round_complete = False return round_complete else: round_complete = True bet_amount", "face-down. By default, the card will be added face-up with face_dir = 'up'.", "of this hand is read and compared to the status of the dealer's", "dealer: player's bet is deposited back into balance print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand()", "= True # The active status communicates whether the hand is still active", "cards in hand and value = 21: returns True; otherwise False). Returns -------", "for value in hand_value_list ] return hand_value_list def best_hand_value(self): \"\"\" Returns the best", "and set used to define the '_holder_name' attribute on the base class. This", "when the associated player chooses to 'hit'. The outcome of each round is", "can still be added to the hand. Once a player decides to 'stand'", "the value(s) of the hand. Defaults to False. Returns ------- hand_value_list : list", "until its value exceeds 17 or goes bust. The dealer's final hand score", "terminal triggers this method, printing all hand details. Returns ------- Output of 'print_hand'", "is dealt face-down; otherwise face-up. If the method is called with face_dir specified,", "False # The bust status communicates whether the hand is bust (value >", "time draw_delay = 1 # The pause in seconds between drawn card actions", "\"\"\" Allows hand objects to be iterated over, yielding constituent card objects in", "required, the amount bet against the hand is also read into 'bet_amount'. 
player_obj", "Output of 'print_hand' method : str Prints the hand's owner followed by shorthand", "object bseing created: either 'Player' or 'Dealer'. Defaults to 'Player' for this base", "face_dir : None / str Defines whether card is added to the hand", "] return hand_value_list def best_hand_value(self): \"\"\" Returns the best possible value of the", "against the hand is also read into 'bet_amount'. player_obj : blackjack.player.Player The player", "methods specific to a hand object held by a player. Players' hands are", "card.card_value(bypass_face_down) else: non_ace_sum += card.card_value(bypass_face_down) except AssertionError: face_down_count += 1 # This if-else", "number of ace cards to calculate possible summed values for. ace_values : tuple", "of 21. Players and dealers can get naturals upon drawing their first two", "automatically with player's hand self._reveal_hand() print(\"Dealer has a natural!\") elif not self.is_natural() and", "they were added. Yields ------ card : blackjack.card.Card The next card in the", "the start of a round. If the dealer gets a natural, the round", "returns a single card object and deletes this card from the deck. If", "base Hand class, initialising an empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def", "hand value <= 21, 'best_value' = None. \"\"\" max_best_value = 21 all_hand_values =", "alt_text is not None: print(alt_text) elif ends_with_s: print(f\"\\n{self._holder_name}' hand\") else: print(f\"\\n{self._holder_name}'s hand\") for", "name is then displayed when printing hand details to screen. \"\"\" self._bet =", "exceeds seventeen. Method initially checks the dealer's hand value: if its best value", "dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount * payout_multiplier) elif player_hand.best_hand_value() == dealer_score: payout_multiplier =", "added to the hand face-up or face-down. 
By default, 'face_dir' is None when", "a list of possible values associated with all face-up cards in the hand", "values associated with all face-up cards in the hand if ace_count > 0:", "hand details to the screen.\"\"\" print(\"\\n---------------\") for card in self: if not card.is_face_up():", "bseing created: either 'Player' or 'Dealer'. Defaults to 'Player' for this base hand", "ace_sum_element in ace_sum_possibilities ] second_set = [ ace_values[1] + ace_sum_element for ace_sum_element in", "an empty list self._active = True # The active status communicates whether the", "to the screen or the player is informed that the dealer has gone", "player's balance will be updated accordingly. Returns ------- round_complete : bool Returns True", "be settled between the dealer and a player unless both participants have 'stood'", "associated with either the dealer or a player (each having their own respective", "ace_sum else: hand_value_list = [non_ace_sum] # Where the hand contains face-down cards, this", "None return best_value def is_active(self): \"\"\" As a boolean, returns the active status", "will be removed from this deck and added to the dealer's hand object.", "def __iter__(self): \"\"\" Allows hand objects to be iterated over, yielding constituent card", "self._natural def stand(self): \"\"\"Updates hand status to inactive: triggered when player chooses to", "self._verify_hand_status() def print_hand(self, alt_text=None): \"\"\" Prints the hand's owner followed by shorthand details", "the hand object. In typical game flow, this bet amount has already been", "# This if-else block defines a list of possible values associated with all", "as the object 'length'.\"\"\" return len(self._live_hand) def hand_value(self, bypass_face_down=False): \"\"\" Returns the total", "class DealerHand(Hand): \"\"\" A subclass defining the properties and methods specific to a", "dealer's turn in a single round must be resolved automatically. 
\"\"\" def __init__(self):", "determined by the number of cards currently in the dealer's hand. If the", "\"\"\" Calls the __init__ method of the base Hand class, initialising an empty", "player_obj.get_name() super().__init__(player_name) def add_bet(self, amount): \"\"\" Adds a bet made by a player", "methods. \"\"\" import time draw_delay = 1 # The pause in seconds between", "False (and the round continues). \"\"\" if not any((self.is_natural(), player_hand.is_natural())): round_complete = False", "str A string that communicates the players score. As the dealer's hand is", "'flip_card' method to ensure the card is correctly face-down before it it is", "list containing each value 'ace_count' number of aces can combine to make. TODO:", "return else: bet_amount = player_hand.get_bet() if player_hand.best_hand_value() > dealer_score: payout_multiplier = 2 player_obj.update_balance(bet_amount", "contains face-down cards, this block adds the consistent face-down string to the face-up", "active status communicates whether the hand is still active in the current round", "card def __repr__(self): \"\"\" Entering the reference for a hand object in the", "self.print_hand(alt_text=\"\\nDealer stands:\") print(f\"Dealer's score = {self.best_hand_value()}\") player_hand.print_hand() print(player_score_message) break if self.is_bust(): self.print_hand(alt_text=\"\\nDealer has", "will always be dealt face-down; the dealer's turn in a single round must", "round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\": drawn_card.flip_card() self._live_hand.append(drawn_card) self._verify_hand_status() def print_hand(self,", "= 2.5 player_obj.update_balance(bet_amount * payout_multiplier) elif all((self.is_natural(), player_hand.is_natural())): # Stand-off between player and", "against their hand is lost when a new round starts and new hands", "from the input deck and adds this card to the hand with orientation", "a round, the dealer resolves this bet. 
Parameters ---------- amount : float The", ": blackjack.card.Card The next card in the hand (within the hand object's '_live_hand'", "will be updated accordingly. Returns ------- round_complete : bool Returns True if no", "of the base Hand class, initialising an empty hand object for the player.", "methods specific to a hand object held by the dealer. The dealer's hand", "returning the number of cards in the hand as the object 'length'.\"\"\" return", "the hand to be dealt face-down, the freshly drawn card (face-up by default)", "---------- player_obj : blackjack.player.Player The player object that owns the hand being initialised.", "Updates hand status accordingly.\"\"\" natural_length = 2 if self.best_hand_value() is None: self._bust =", "displayed when printing hand details to screen. \"\"\" self._bet = float( 0 )", "DealerHand(Hand): \"\"\" A subclass defining the properties and methods specific to a hand", "cards to calculate possible summed values for. ace_values : tuple A two-element tuple", "hand details to screen. \"\"\" self._bet = float( 0 ) # An attribute", "\"\"\" self._live_hand = ( [] ) # A list of card objects making", "a hand object. A hand object is a collection of cards associated with", "returns 'bust' status of hand in the current round (value > 21: returns", "gets a natural, the round is over and they collect the bet of", "and new hands are initialised. Parameters ---------- player_hand : blackjack.hand.PlayerHand A player's 'live'", ": blackjack.hand.PlayerHand A player's 'live' hand object. The value of this hand is", "ace_count > 0: ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values) ace_sum = [ possibility + non_ace_sum", "whether the hand is now bust and updates all hand statuses accordingly. Parameters", "\"\"\"Updates hand status to inactive: triggered when player chooses to draw no more", "cards can still be added to the hand. 
Once a player decides to", "\"\"\" return self._bust def is_natural(self): \"\"\" As a boolean, returns 'natural' status of", "True @staticmethod def _calculate_ace_values(ace_count, ace_values): \"\"\" Returns the possible values of a collection", "class, initialising an empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj,", "'face_dir' is None when method is called against a dealer's hand object. Where", "uneffected. The bet placed against their hand is lost when a new round", "balance. \"\"\" self._bet += amount def get_bet(self): \"\"\"Returns the amount bet against this", "their balance is uneffected. The bet placed against their hand is lost when", "card objects making up the hand; initialised as an empty list self._active =", "best_hand_value(self): \"\"\" Returns the best possible value of the hand as an integer.", "to the hand face-up or face-down. By default, the card will be added", "whether the hand is bust, has value equal to 21 or is a", "print(f\"\\n{player_obj.get_name()} has a natural!\") self._reveal_hand() print(\"\\nSo does the dealer! It's a stand-off!\") payout_multiplier", "the dealer or a player (each having their own respective subclasses with specialised", "ace_values) ace_sum = [ possibility + non_ace_sum for possibility in ace_sum_possibilities ] hand_value_list", "= 1 player_obj.update_balance(bet_amount * payout_multiplier) class PlayerHand(Hand): \"\"\" A subclass defining the properties", "= 0 ace_values = None face_down_count = 0 non_ace_sum = 0 # Loop:", "hand object in the terminal triggers this method, printing all hand details. Returns", "False; otherwise = True). A hand is regarded as active in a round", "already been removed from the player's balance. \"\"\" self._bet += amount def get_bet(self):", "A subclass defining the properties and methods specific to a hand object held", "hand face-up or face-down. 
By default, the card will be added face-up with", "loses, the method exits and their balance is uneffected. The bet placed against", "list containing all possible values the hand's combination of cards can take with", "bet amount has already been verified as positive and has already been removed", "not also get a natural. If a player gets a natural and the", "with face_dir specified, it behaves identically to the equivalent method on the base", "be made against these hands. \"\"\" def __init__(self, player_obj): \"\"\" Calls the __init__", "Hand class, initialising an empty hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self,", "dealer's hands. \"\"\" def __init__(self, holder_name=\"Player\"): \"\"\" Initialises an empty hand object for", "Parameters ---------- bypass_face_down : bool Tells method whether to include face-down cards in", "array. Finally, the method calls '_validate_hand_status' that checks whether the hand is now", "hand is lost when a new round starts and new hands are initialised.", "face-down. By default, 'face_dir' is None when method is called against a dealer's", "is payed the value of their bet plus the original bet amount is", "the hand is bust, has value equal to 21 or is a natural.", "of this player is queried and set used to define the '_holder_name' attribute", "hand object for the dealer.\"\"\" super().__init__(\"Dealer\") def draw_card(self, deck_obj, face_dir=None): \"\"\" Removes one", "ace_values[1] + ace_sum_element for ace_sum_element in ace_sum_possibilities ] ace_sum_possibilities = list(set(first_set + second_set))", "None. Returns ------- best_value : int or None The best possible total value", "is printed so the user can easily compare the relative scores. \"\"\" dealer_target", "with face_dir = 'up'. 
Any value of face_dir not spelling 'up' (case-insensitive) will", "print_hand(self, alt_text=None): \"\"\" Prints the hand's owner followed by shorthand details of all", "the screen or the player is informed that the dealer has gone bust.", "self: # Try statement catches AssertionErrors thrown when 'is_ace' method encounters a face-down", "cards in the hand; counts face-up aces; sums face-up cards that aren't an", "object. \"\"\" empty_string = \"\" ends_with_s = self._holder_name[-1].lower() == \"s\" if alt_text is", "holding the hand in the current round. Returns ------- bool True when hand", "val in all_hand_values if val <= max_best_value]) except ValueError: best_value = None return", "each value 'ace_count' number of aces can combine to make. TODO: Refactor to", "this card to the hand with orientation defined by 'face_dir'. Parameters ---------- deck_obj", "empty string, returned so that the 'print_hand' method can be called by the", "'bust' status of hand in the current round (value > 21: returns True;", "in the terminal triggers this method, printing all hand details. Returns ------- Output", "hand is resolved, the players score is printed each time the dealer's hand", "bet. Parameters ---------- amount : float The amount bet against the hand object.", "is added to the hand face-up or face-down. By default, the card will", "self._active = True # The active status communicates whether the hand is still", "the current hand object: at the end of a round, the dealer resolves", "as inactive in the current round.\" drawn_card = deck_obj.deal_card() if face_dir.lower() != \"up\":", "round_complete def settle_bet(self, player_hand, player_obj): \"\"\" Method settles any bets at the end", ": int or None The best possible total value of the hand's constituent", "any bets at the end of the round; where the player loses, the", "followed by shorthand details of all cards currently within the hand. 
Parameters ----------", "bet against the hand is also read into 'bet_amount'. player_obj : blackjack.player.Player The", "deck object - cards may be removed from this deck and added to" ]
[ "to be used as cut-off for removing rare words to be specifiied here.", "contain all images stored in h5py format and captions stored in json files.", "stored in h5py format and captions stored in json files. Minimum word frequencies", "import create_input_files \"\"\" To create files that contain all images stored in h5py", "stored in json files. Minimum word frequencies to be used as cut-off for", "files. Minimum word frequencies to be used as cut-off for removing rare words", "word frequencies to be used as cut-off for removing rare words to be", "here. \"\"\" if __name__ == '__main__': create_input_files(dataset='coco', karpathy_json_path='path_to___dataset_coco.json', image_folder='path_to__mscoco_folder', captions_per_image=5, min_word_freq=5, output_folder='folder_for_processed_data', max_len=50)", "json files. Minimum word frequencies to be used as cut-off for removing rare", "be specifiied here. \"\"\" if __name__ == '__main__': create_input_files(dataset='coco', karpathy_json_path='path_to___dataset_coco.json', image_folder='path_to__mscoco_folder', captions_per_image=5, min_word_freq=5,", "from utils import create_input_files \"\"\" To create files that contain all images stored", "To create files that contain all images stored in h5py format and captions", "h5py format and captions stored in json files. Minimum word frequencies to be", "be used as cut-off for removing rare words to be specifiied here. \"\"\"", "words to be specifiied here. 
\"\"\" if __name__ == '__main__': create_input_files(dataset='coco', karpathy_json_path='path_to___dataset_coco.json', image_folder='path_to__mscoco_folder',", "utils import create_input_files \"\"\" To create files that contain all images stored in", "create files that contain all images stored in h5py format and captions stored", "Minimum word frequencies to be used as cut-off for removing rare words to", "frequencies to be used as cut-off for removing rare words to be specifiied", "cut-off for removing rare words to be specifiied here. \"\"\" if __name__ ==", "that contain all images stored in h5py format and captions stored in json", "captions stored in json files. Minimum word frequencies to be used as cut-off", "removing rare words to be specifiied here. \"\"\" if __name__ == '__main__': create_input_files(dataset='coco',", "files that contain all images stored in h5py format and captions stored in", "in json files. Minimum word frequencies to be used as cut-off for removing", "rare words to be specifiied here. \"\"\" if __name__ == '__main__': create_input_files(dataset='coco', karpathy_json_path='path_to___dataset_coco.json',", "as cut-off for removing rare words to be specifiied here. \"\"\" if __name__", "specifiied here. \"\"\" if __name__ == '__main__': create_input_files(dataset='coco', karpathy_json_path='path_to___dataset_coco.json', image_folder='path_to__mscoco_folder', captions_per_image=5, min_word_freq=5, output_folder='folder_for_processed_data',", "for removing rare words to be specifiied here. \"\"\" if __name__ == '__main__':", "in h5py format and captions stored in json files. Minimum word frequencies to", "\"\"\" To create files that contain all images stored in h5py format and", "create_input_files \"\"\" To create files that contain all images stored in h5py format", "and captions stored in json files. Minimum word frequencies to be used as", "to be specifiied here. 
\"\"\" if __name__ == '__main__': create_input_files(dataset='coco', karpathy_json_path='path_to___dataset_coco.json', image_folder='path_to__mscoco_folder', captions_per_image=5,", "used as cut-off for removing rare words to be specifiied here. \"\"\" if", "all images stored in h5py format and captions stored in json files. Minimum", "format and captions stored in json files. Minimum word frequencies to be used", "images stored in h5py format and captions stored in json files. Minimum word" ]
[ "def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq", "TableManager: def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq]", "low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq 2\"]<high_freq]", "high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq 2\"]<high_freq] table=table[(table[\"Freq", "def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq]", "print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq 
2\"]<high_freq] table=table[(table[\"Freq 1\"]-table[\"Freq 2\"]).abs()<delta_freq] return", "pandas as pd class TableManager: def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq)", "import pandas as pd class TableManager: def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq)", "self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq", "get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq", "pd class TableManager: def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns)", "table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] 
table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq 2\"]<high_freq] table=table[(table[\"Freq 1\"]-table[\"Freq 2\"]).abs()<delta_freq]", "table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq 2\"]<high_freq] table=table[(table[\"Freq 1\"]-table[\"Freq 2\"]).abs()<delta_freq] return table[[\"Feature\",\"Info\"]]", "as pd class TableManager: def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target]", "delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq] table=table[table[\"Freq 2\"]>low_freq] table=table[table[\"Freq 2\"]<high_freq] table=table[(table[\"Freq 1\"]-table[\"Freq", "__init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq 1\"]>low_freq] table=table[table[\"Freq 1\"]<high_freq]", "class TableManager: def __init__(self,table_file): self.master_table=pd.read_csv(table_file).sort_values(\"Info\",ascending=False) def get_filtered_table(self,low_freq,high_freq,delta_freq,target): low_freq=float(low_freq) high_freq=float(high_freq) delta_freq=float(delta_freq) table=self.master_table[self.master_table[\"Target\"]==target] print(table.columns) table=table[table[\"Freq" ]
[ "= [] for value in range(1, 11): squares1.append(value**2) print(squares1) # With list comprehension", "for value in range(1, 11): squares1.append(value**2) print(squares1) # With list comprehension squares2 =", "value in range(1, 11): squares1.append(value**2) print(squares1) # With list comprehension squares2 = [value**2", "comprehension squares1 = [] for value in range(1, 11): squares1.append(value**2) print(squares1) # With", "range(1, 11): squares1.append(value**2) print(squares1) # With list comprehension squares2 = [value**2 for value", "print(squares1) # With list comprehension squares2 = [value**2 for value in range(1, 11)]", "# With list comprehension squares2 = [value**2 for value in range(1, 11)] print(squares2)", "[] for value in range(1, 11): squares1.append(value**2) print(squares1) # With list comprehension squares2", "Without list comprehension squares1 = [] for value in range(1, 11): squares1.append(value**2) print(squares1)", "squares1.append(value**2) print(squares1) # With list comprehension squares2 = [value**2 for value in range(1,", "list comprehension squares1 = [] for value in range(1, 11): squares1.append(value**2) print(squares1) #", "in range(1, 11): squares1.append(value**2) print(squares1) # With list comprehension squares2 = [value**2 for", "11): squares1.append(value**2) print(squares1) # With list comprehension squares2 = [value**2 for value in", "# Without list comprehension squares1 = [] for value in range(1, 11): squares1.append(value**2)", "squares1 = [] for value in range(1, 11): squares1.append(value**2) print(squares1) # With list" ]
[ "new_state_dict[name] = v return new_state_dict if __name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\",", "def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx = 0 new_state_dict =", "new_state_dict if __name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape", "= CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1, 3, 960, 960))", "1 else: start_idx = 0 new_state_dict = OrderedDict() for k, v in state_dict.items():", "start_idx = 0 new_state_dict = OrderedDict() for k, v in state_dict.items(): name =", "= OrderedDict() for k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v", "= torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input': [2,3]})", "= \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if __name__ == '__main__': net =", "#dynamic shape x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"],", "2022/3/8 14:38 # @Author : jiaopaner import sys sys.path.insert(0, './') import torch from", ": 2022/3/8 14:38 # @Author : jiaopaner import sys sys.path.insert(0, './') import torch", "utf-8 -*- # @Time : 2022/3/8 14:38 # @Author : jiaopaner import sys", "k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if", "# -*- coding: utf-8 -*- # @Time : 2022/3/8 14:38 # @Author :", 
"from collections import OrderedDict from craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx", "start_idx = 1 else: start_idx = 0 new_state_dict = OrderedDict() for k, v", "14:38 # @Author : jiaopaner import sys sys.path.insert(0, './') import torch from collections", "v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if __name__", "craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx =", "in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if __name__ ==", "import OrderedDict from craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1", "CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx = 0 new_state_dict", "= 0 new_state_dict = OrderedDict() for k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:])", "'./') import torch from collections import OrderedDict from craft import CRAFT def copyStateDict(state_dict):", "= 1 else: start_idx = 0 new_state_dict = OrderedDict() for k, v in", "else: start_idx = 0 new_state_dict = OrderedDict() for k, v in state_dict.items(): name", "x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input':", "return new_state_dict if __name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic", "960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input': [2,3]}) # 
x2paddle --framework=onnx", "coding: utf-8 -*- # @Time : 2022/3/8 14:38 # @Author : jiaopaner import", "new_state_dict = OrderedDict() for k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] =", "collections import OrderedDict from craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx =", "sys sys.path.insert(0, './') import torch from collections import OrderedDict from craft import CRAFT", "shape x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"],", "import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx = 0", "v return new_state_dict if __name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval()", "'__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1, 3,", "if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx = 0 new_state_dict = OrderedDict() for", "copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx = 0 new_state_dict = OrderedDict()", "-*- # @Time : 2022/3/8 14:38 # @Author : jiaopaner import sys sys.path.insert(0,", "0 new_state_dict = OrderedDict() for k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name]", "state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if __name__ == '__main__':", "# @Author : jiaopaner import sys sys.path.insert(0, './') import torch from collections import", "name = 
\".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if __name__ == '__main__': net", "\".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict if __name__ == '__main__': net = CRAFT()", "net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1, 3, 960,", "map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx',", "import sys sys.path.insert(0, './') import torch from collections import OrderedDict from craft import", "sys.path.insert(0, './') import torch from collections import OrderedDict from craft import CRAFT def", "3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input': [2,3]}) # x2paddle", "@Author : jiaopaner import sys sys.path.insert(0, './') import torch from collections import OrderedDict", "== '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1,", "OrderedDict() for k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return", "from craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx", "torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input': [2,3]}) #", "960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input': [2,3]}) # x2paddle --framework=onnx --model=./pd_model/model.onnx", 
"= v return new_state_dict if __name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\")))", "net.eval() #dynamic shape x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11,", "@Time : 2022/3/8 14:38 # @Author : jiaopaner import sys sys.path.insert(0, './') import", ": jiaopaner import sys sys.path.insert(0, './') import torch from collections import OrderedDict from", "import torch from collections import OrderedDict from craft import CRAFT def copyStateDict(state_dict): if", "torch from collections import OrderedDict from craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"):", "-*- coding: utf-8 -*- # @Time : 2022/3/8 14:38 # @Author : jiaopaner", "# @Time : 2022/3/8 14:38 # @Author : jiaopaner import sys sys.path.insert(0, './')", "__name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x =", "CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net,", "for k, v in state_dict.items(): name = \".\".join(k.split(\".\")[start_idx:]) new_state_dict[name] = v return new_state_dict", "net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x = torch.randn((1, 3, 960, 960)) torch.onnx.export(net, x,", "if __name__ == '__main__': net = CRAFT() net.load_state_dict(copyStateDict(torch.load(\"/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth\", map_location=\"cpu\"))) net.eval() #dynamic shape x", 
"jiaopaner import sys sys.path.insert(0, './') import torch from collections import OrderedDict from craft", "OrderedDict from craft import CRAFT def copyStateDict(state_dict): if list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else:", "torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=[\"input\"], output_names=[\"output\"], dynamic_axes={'input': [2,3]}) # x2paddle --framework=onnx --model=./pd_model/model.onnx --save_dir=pd_model_dynamic", "list(state_dict.keys())[0].startswith(\"module\"): start_idx = 1 else: start_idx = 0 new_state_dict = OrderedDict() for k," ]
[ "Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\"", "\"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\"", "Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)),", "Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\"", "Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\",", "Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\",", "Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",),", "\"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"),", "Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\"", "add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0,", "Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)),", "Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"),", "import * print('<h1>Molecules</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\",", "Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"),", "Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)),", 
"\"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\",", "\"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\"", ")) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0,", "Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\"", "Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\"", "Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",),", ")) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\",", "Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0,", "\"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" ))", "Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)),", "Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\",", "\"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"),", "* print('<h1>Molecules</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram(", "\"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\",", "\"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)),", "Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) 
add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\"", "Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0,", "Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\",", "\"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\"", "\"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\"", "Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"),", "Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"),", "\"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram(", "\"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"),", "Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0,", "\")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram(", "Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0,", "Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"),", "Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\"", ")) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\",", "add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\"", "Comment(\" \"), 
\"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\"", "\"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"),", "\"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\"", "\"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\",", "\"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\",", "\"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram(", "Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\",", "\"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\",", "Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\",", "Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram(", "\"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" ))", "Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\"", "Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\"", "\"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\"", "Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",),", "sys from railroad import * print('<h1>Molecules</h1>') 
add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",),", "\"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" ))", "\"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" ))", "\"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\",", "Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\",", "\")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\",", "\")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)),", ")) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram(", "add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\"", "\"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\",", "Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"),", "add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0,", "Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\"", "\"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"),", "\"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"),", "\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\",", "Choice(0, 
Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0,", "\"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\"", "print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0,", "\"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\"", "\"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\",", "Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" ))", "\"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\",", "\")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\",", "Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" ))", "Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\"", "\"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" ))", "Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\"", "\"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"),", "Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0,", "\"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"),", "Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, 
Comment(\" \"), \"sh3\",),", ")) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",),", "Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\"", "Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",),", "Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0,", "Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",),", "Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",),", "\"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\"", "Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)),", "Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0,", "Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" ))", "\"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\"", "Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~p\"),)), \")\"", "\"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"),", "add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0,", "\"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"),", "Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) 
print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\"", "print('<h1>Molecules</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\",", "Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0,", "\"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"),", "railroad import * print('<h1>Molecules</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" ))", "\"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\",", "Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"),", "add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"),", "Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",),", "\"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"),", "\"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\"", "\"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0,", ")) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0,", "\"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\",", "\"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0,", "Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), 
\"~u\"),)), Choice(0, Comment(\"", "Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0,", "Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0,", "\"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"),", "\"), \"Site\",), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" ))", "Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0,", "\"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\",", "\"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\",", "\")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0,", "Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0,", "\")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"),", "Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\"", "Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\"", "Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0,", "Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",),", "\"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>')", 
"\"Shc(\", Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)),", "\"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"),", "\")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",),", "import sys from railroad import * print('<h1>Molecules</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"),", "\"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)),", "\"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\"", "print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\",", "Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~p\"),)),", "\"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGFR.EGFR\",", "\"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>')", "\"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\",", "Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"),", "Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" )) add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"),", "\"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"),", "Choice(0, Comment(\" \"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)),", "Choice(0, Comment(\" 
\"), \"sh3\",), Choice(0, Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\"", ")) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\"", "\"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\"", "\"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"),", "Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0, Comment(\"", "Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0,", "\"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\" \"), \"sh2\",), \")\" ))", "Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)), \")\" ))", "\"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~C\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"),", "Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0,", "Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram(", "\"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\"", "Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram(", "\"Site\",), \")\" )) add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"),", "Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\"),)),", "\")\" )) add(\"EGF\", Diagram( \"EGF(\", 
Choice(0, Comment(\" \"), \"Site\",), \")\" )) add(\"Grb2\", Diagram(", "Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\"", "Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~u\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0,", "Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\", Choice(0, Comment(\"", "\"), \"~u\", \"~p\"),)), \")\" )) print('<h1>Species</h1>') add(\"EGFR\", Diagram( \"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",),", "Comment(\" \"), Sequence(\"Y\", Choice(0, Comment(\" \"), \"~p\"),)), \")\" )) add(\"Shc\", Diagram( \"Shc(\", Choice(0,", "\"~u\"),)), \")\" )) add(\"EGFR.EGFR\", Diagram(\"EGFR(\", Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\",", "Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~u\", \"~p\"),)), \")\" )) add(\"Grb2\", Diagram( \"Grb2(\",", "Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~B\"),)), \")\",\"EGFR(\",", "from railroad import * print('<h1>Molecules</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0, Comment(\" \"), \"Site\",), \")\"", "Choice(0, Comment(\" \"), \"ecd\",), Choice(0, Comment(\" \"), \"tmd\",), Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0,", "Choice(0, Comment(\" \"), Sequence(\"Y1\", Choice(0, Comment(\" \"), \"~A\"),)), Choice(0, Comment(\" \"), Sequence(\"Y2\", Choice(0,", "\"), Sequence(\"Y2\", Choice(0, Comment(\" \"), \"~D\"),)), \")\" )) print('<h1>Observables</h1>') add(\"EGF\", Diagram( \"EGF(\", Choice(0," ]
[ "\"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\"))", "strength J/m Km = 0.5 * mu0 * Ms**2 # magnetostatic energy density", "zs = np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3)) # easy axis probed", "= fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs,", "with anisotropy vectors as follows: ----------------------------------- --> --> --> --> --> --> -->", "the film and # (1, 0, 0) in the upper half. This is", "np.pi * 10**-7 # vacuum permeability N/A^2 Ms = 1.0e6 # saturation magnetisation", "Ly, Lz), nx, ny, nz) # Anisotropy easy axis is (0, 0, 1)", "pylab import dolfin as df import matplotlib.pyplot as plt from finmag import Simulation", "m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR, 'exchangespring.pvd')) if", "label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR,", "import Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0", "dim=3) a = Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1))", "np import matplotlib as mpl mpl.use('Agg') import pylab import dolfin as df import", "Km L = lexch / unit_length nx = 10 Lx = nx *", "working on. 
boundary = Lz / 2.0 expr_a = df.Expression((\"x[2] <= b ?", "= np.zeros((points, 3)) # easy axis probed along z-axis m_zs = np.zeros((points, 3))", "Anisotropy easy axis is (0, 0, 1) in the lower half of the", "lower half of the film and # (1, 0, 0) in the upper", "import pylab import dolfin as df import matplotlib.pyplot as plt from finmag import", "| | | | | | | | | ----------------------------------- \"\"\" import os", "boundary = Lz / 2.0 expr_a = df.Expression((\"x[2] <= b ? 0 :", "sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\",", ": 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V,", "^ ^ | | | | | | | | | | |", "V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V, expr_a) sim = Simulation(mesh,", "is (0, 0, 1) in the lower half of the film and #", "L ny = 1 Ly = ny * L nz = 30 Lz", "ny = 1 Ly = ny * L nz = 30 Lz =", "--> --> --> --> --> --> --> --> ----------------------------------- ^ ^ ^ ^", "1 : 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a =", "<= b ? 1 : 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0,", "| ----------------------------------- \"\"\" import os import numpy as np import matplotlib as mpl", "enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig =", "| | | | | | | | ----------------------------------- \"\"\" import os import", "ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\")", "# Anisotropy easy axis is (0, 0, 1) in the lower half of", "Lz = nz * L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz),", "df.Expression((\"x[2] <= b ? 0 : 1\", \"0\", \"x[2] <= b ? 
1", "= nz * L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx,", "unit_length nx = 10 Lx = nx * L ny = 1 Ly", "axis is (0, 0, 1) in the lower half of the film and", "^ | | | | | | | | | | | |", "probed along z-axis m_zs = np.zeros((points, 3)) # magnetisation probed along z-axis for", "as plt from finmag import Simulation from finmag.field import Field from finmag.energies import", "= os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 * np.pi * 10**-7 # vacuum", "from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0", "N/A^2 Ms = 1.0e6 # saturation magnetisation A/m A = 13.0e-12 # exchange", "? 1 : 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a", "= (A/Km)**0.5 # exchange length m unit_length = 1e-9 K1 = Km L", "<= b ? 0 : 1\", \"0\", \"x[2] <= b ? 1 :", "UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 * np.pi *", "density scale kg/ms^2 lexch = (A/Km)**0.5 # exchange length m unit_length = 1e-9", "nz) # Anisotropy easy axis is (0, 0, 1) in the lower half", "anisotropy vectors as follows: ----------------------------------- --> --> --> --> --> --> --> -->", "import dolfin as df import matplotlib.pyplot as plt from finmag import Simulation from", "= 13.0e-12 # exchange coupling strength J/m Km = 0.5 * mu0 *", "<filename>examples/spatially-varying-anisotropy/run.py<gh_stars>1-10 \"\"\" Demonstrating spatially varying anisotropy. 
Example with anisotropy vectors as follows: -----------------------------------", "= 1.0e6 # saturation magnetisation A/m A = 13.0e-12 # exchange coupling strength", "mu0 * Ms**2 # magnetostatic energy density scale kg/ms^2 lexch = (A/Km)**0.5 #", "--> --> --> --> --> --> --> --> --> --> ----------------------------------- ^ ^", "* L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx, ny, nz)", "| | | | | | ----------------------------------- \"\"\" import os import numpy as", "exchange spring # systems that <NAME> is working on. boundary = Lz /", "magnetisation probed along z-axis for i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0,", "^ ^ ^ ^ ^ ^ ^ ^ ^ | | | |", "K1 = Km L = lexch / unit_length nx = 10 Lx =", "Ly = ny * L nz = 30 Lz = nz * L", "mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx, ny, nz) # Anisotropy", "probed along z-axis for i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z))", "= plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2],", "axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\",", "scale kg/ms^2 lexch = (A/Km)**0.5 # exchange length m unit_length = 1e-9 K1", "1.0e6 # saturation magnetisation A/m A = 13.0e-12 # exchange coupling strength J/m", "nx = 10 Lx = nx * L ny = 1 Ly =", "df import matplotlib.pyplot as plt from finmag import Simulation from finmag.field import Field", "model of the exchange spring # systems that <NAME> is working on. 
boundary", "label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\")", "import matplotlib.pyplot as plt from finmag import Simulation from finmag.field import Field from", "(0, 0, 1) in the lower half of the film and # (1,", "numpy as np import matplotlib as mpl mpl.use('Agg') import pylab import dolfin as", "axis_zs = np.zeros((points, 3)) # easy axis probed along z-axis m_zs = np.zeros((points,", "length m unit_length = 1e-9 K1 = Km L = lexch / unit_length", "^ ^ ^ ^ ^ ^ | | | | | | |", "2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR, 'exchangespring.pvd')) if __name__", "along z-axis m_zs = np.zeros((points, 3)) # magnetisation probed along z-axis for i,", "fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:,", "* L nz = 30 Lz = nz * L mesh = df.BoxMesh(df.Point(0,", "2.0 expr_a = df.Expression((\"x[2] <= b ? 0 : 1\", \"0\", \"x[2] <=", "a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax =", "/ 2.0 expr_a = df.Expression((\"x[2] <= b ? 
0 : 1\", \"0\", \"x[2]", "\"DG\", 0, dim=3) a = Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1,", "b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V, expr_a) sim", "# easy axis probed along z-axis m_zs = np.zeros((points, 3)) # magnetisation probed", "saturation magnetisation A/m A = 13.0e-12 # exchange coupling strength J/m Km =", "This is a toy model of the exchange spring # systems that <NAME>", "--> --> --> --> --> ----------------------------------- ^ ^ ^ ^ ^ ^ ^", "--> --> --> --> ----------------------------------- ^ ^ ^ ^ ^ ^ ^ ^", "nz * L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx, ny,", "vacuum permeability N/A^2 Ms = 1.0e6 # saturation magnetisation A/m A = 13.0e-12", "1) in the lower half of the film and # (1, 0, 0)", "df.Point(Lx, Ly, Lz), nx, ny, nz) # Anisotropy easy axis is (0, 0,", "for i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0,", "ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0],", "= np.zeros((points, 3)) # magnetisation probed along z-axis for i, z in enumerate(zs):", "from finmag.field import Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def", "is a toy model of the exchange spring # systems that <NAME> is", "lexch / unit_length nx = 10 Lx = nx * L ny =", "4.0 * np.pi * 10**-7 # vacuum permeability N/A^2 Ms = 1.0e6 #", "= ny * L nz = 30 Lz = nz * L mesh", "df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx, ny, nz) # Anisotropy easy axis", "mpl.use('Agg') import pylab import dolfin as df import matplotlib.pyplot as plt from finmag", "Lx = nx * L ny = 1 Ly = ny * L", "= np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3)) # easy axis probed along", "# magnetostatic energy density scale kg/ms^2 lexch = (A/Km)**0.5 # exchange length m", "1 Ly = ny * L nz = 30 Lz = nz *", 
"^ ^ ^ ^ ^ ^ ^ ^ | | | | |", "L = lexch / unit_length nx = 10 Lx = nx * L", "matplotlib.pyplot as plt from finmag import Simulation from finmag.field import Field from finmag.energies", "* L ny = 1 Ly = ny * L nz = 30", "| | | | | | | | | | | | |", "2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\")", "^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ | |", "mu0 = 4.0 * np.pi * 10**-7 # vacuum permeability N/A^2 Ms =", "\"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs,", "ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR, 'exchangespring.pvd')) if __name__ == \"__main__\": run_simulation(plot=True)", "10 Lx = nx * L ny = 1 Ly = ny *", "import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 * np.pi", "--> --> --> --> --> --> --> --> --> --> --> --> -----------------------------------", "spring # systems that <NAME> is working on. 
boundary = Lz / 2.0", "= df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx, ny, nz) # Anisotropy easy", "^ ^ ^ ^ ^ | | | | | | | |", "* Ms**2 # magnetostatic energy density scale kg/ms^2 lexch = (A/Km)**0.5 # exchange", "np.zeros((points, 3)) # easy axis probed along z-axis m_zs = np.zeros((points, 3)) #", "= 1e-9 K1 = Km L = lexch / unit_length nx = 10", "A = 13.0e-12 # exchange coupling strength J/m Km = 0.5 * mu0", "sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points = 200 zs = np.linspace(0, Lz,", "Km = 0.5 * mu0 * Ms**2 # magnetostatic energy density scale kg/ms^2", "\"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z", "200 zs = np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3)) # easy axis", "Demonstrating spatially varying anisotropy. Example with anisotropy vectors as follows: ----------------------------------- --> -->", "| | | | | | | | | | | | -----------------------------------", "# exchange length m unit_length = 1e-9 K1 = Km L = lexch", "along z-axis for i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i]", "os import numpy as np import matplotlib as mpl mpl.use('Agg') import pylab import", "L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly, Lz), nx, ny, nz) #", "0, 1) in the lower half of the film and # (1, 0,", "m unit_length = 1e-9 K1 = Km L = lexch / unit_length nx", "| | | ----------------------------------- \"\"\" import os import numpy as np import matplotlib", "easy axis is (0, 0, 1) in the lower half of the film", "a = Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1,", "vectors as follows: ----------------------------------- --> --> --> --> --> --> --> --> -->", "ny * L nz = 30 Lz = nz * L mesh =", "a toy model of the exchange spring # systems that <NAME> is working", "varying anisotropy. 
Example with anisotropy vectors as follows: ----------------------------------- --> --> --> -->", "= Lz / 2.0 expr_a = df.Expression((\"x[2] <= b ? 0 : 1\",", "# vacuum permeability N/A^2 Ms = 1.0e6 # saturation magnetisation A/m A =", "Lz / 2.0 expr_a = df.Expression((\"x[2] <= b ? 0 : 1\", \"0\",", "sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if", "sim.relax() if plot: points = 200 zs = np.linspace(0, Lz, points) axis_zs =", "= 4.0 * np.pi * 10**-7 # vacuum permeability N/A^2 Ms = 1.0e6", "\"\"\" Demonstrating spatially varying anisotropy. Example with anisotropy vectors as follows: ----------------------------------- -->", "import Simulation from finmag.field import Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR =", "(1, 0, 0) in the upper half. This is a toy model of", "in the upper half. This is a toy model of the exchange spring", "J/m Km = 0.5 * mu0 * Ms**2 # magnetostatic energy density scale", "half. This is a toy model of the exchange spring # systems that", "in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig", "* np.pi * 10**-7 # vacuum permeability N/A^2 Ms = 1.0e6 # saturation", ": 1\", \"0\", \"x[2] <= b ? 1 : 0\"), b=boundary, degree=1) V", "| | | | ----------------------------------- \"\"\" import os import numpy as np import", "finmag import Simulation from finmag.field import Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR", "i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0,", "upper half. 
This is a toy model of the exchange spring # systems", "0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR,", "Simulation from finmag.field import Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__))", "plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\",", "as mpl mpl.use('Agg') import pylab import dolfin as df import matplotlib.pyplot as plt", "nx, ny, nz) # Anisotropy easy axis is (0, 0, 1) in the", "3)) # easy axis probed along z-axis m_zs = np.zeros((points, 3)) # magnetisation", "0), df.Point(Lx, Ly, Lz), nx, ny, nz) # Anisotropy easy axis is (0,", "Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 =", "import matplotlib as mpl mpl.use('Agg') import pylab import dolfin as df import matplotlib.pyplot", "m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:,", "np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3)) # easy axis probed along z-axis", "0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V, expr_a)", "| | ----------------------------------- \"\"\" import os import numpy as np import matplotlib as", "ny, nz) # Anisotropy easy axis is (0, 0, 1) in the lower", "b ? 0 : 1\", \"0\", \"x[2] <= b ? 
1 : 0\"),", "exchange coupling strength J/m Km = 0.5 * mu0 * Ms**2 # magnetostatic", "z-axis for i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] =", "finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 *", "# magnetisation probed along z-axis for i, z in enumerate(zs): axis_zs[i] = a((Lx/2.0,", "= Km L = lexch / unit_length nx = 10 Lx = nx", "1\", \"0\", \"x[2] <= b ? 1 : 0\"), b=boundary, degree=1) V =", "0, 0), df.Point(Lx, Ly, Lz), nx, ny, nz) # Anisotropy easy axis is", "as follows: ----------------------------------- --> --> --> --> --> --> --> --> --> -->", "energy density scale kg/ms^2 lexch = (A/Km)**0.5 # exchange length m unit_length =", "* mu0 * Ms**2 # magnetostatic energy density scale kg/ms^2 lexch = (A/Km)**0.5", "ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper", "a)) sim.add(Exchange(A)) sim.relax() if plot: points = 200 zs = np.linspace(0, Lz, points)", "and # (1, 0, 0) in the upper half. 
This is a toy", "\"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR, 'exchangespring.pvd')) if __name__ ==", "label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR, 'exchangespring.pvd')) if __name__ == \"__main__\":", "dolfin as df import matplotlib.pyplot as plt from finmag import Simulation from finmag.field", "z-axis m_zs = np.zeros((points, 3)) # magnetisation probed along z-axis for i, z", "ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\") plt.savefig(os.path.join(MODULE_DIR, \"profile.png\")) sim.m_field.save_pvd(os.path.join(MODULE_DIR, 'exchangespring.pvd'))", "expr_a = df.Expression((\"x[2] <= b ? 0 : 1\", \"0\", \"x[2] <= b", "in the lower half of the film and # (1, 0, 0) in", "z in enumerate(zs): axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z))", "kg/ms^2 lexch = (A/Km)**0.5 # exchange length m unit_length = 1e-9 K1 =", "--> --> --> --> --> --> --> --> --> --> --> ----------------------------------- ^", "sim.add(Exchange(A)) sim.relax() if plot: points = 200 zs = np.linspace(0, Lz, points) axis_zs", "| | | | | | | | | | ----------------------------------- \"\"\" import", "0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\")", "points) axis_zs = np.zeros((points, 3)) # easy axis probed along z-axis m_zs =", "run_simulation(plot=False): mu0 = 4.0 * np.pi * 10**-7 # vacuum permeability N/A^2 Ms", "plot: points = 200 zs = np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3))", "the lower half of the film and # (1, 0, 0) in the", "film and # (1, 0, 0) in the upper half. 
This is a", "^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ |", "def run_simulation(plot=False): mu0 = 4.0 * np.pi * 10**-7 # vacuum permeability N/A^2", "the exchange spring # systems that <NAME> is working on. boundary = Lz", "Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A))", "= sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0],", "anisotropy. Example with anisotropy vectors as follows: ----------------------------------- --> --> --> --> -->", "z)) fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs,", "Ly/2.0, z)) fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\")", "easy axis probed along z-axis m_zs = np.zeros((points, 3)) # magnetisation probed along", "--> --> --> --> --> --> --> --> --> ----------------------------------- ^ ^ ^", "of the film and # (1, 0, 0) in the upper half. This", "0, dim=3) a = Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0,", "MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 * np.pi * 10**-7 #", "Ms = 1.0e6 # saturation magnetisation A/m A = 13.0e-12 # exchange coupling", "L nz = 30 Lz = nz * L mesh = df.BoxMesh(df.Point(0, 0,", "magnetostatic energy density scale kg/ms^2 lexch = (A/Km)**0.5 # exchange length m unit_length", "sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points = 200 zs", "= Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot:", "systems that <NAME> is working on. boundary = Lz / 2.0 expr_a =", "exchange length m unit_length = 1e-9 K1 = Km L = lexch /", "b ? 
1 : 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3)", "# saturation magnetisation A/m A = 13.0e-12 # exchange coupling strength J/m Km", "lexch = (A/Km)**0.5 # exchange length m unit_length = 1e-9 K1 = Km", "axis probed along z-axis m_zs = np.zeros((points, 3)) # magnetisation probed along z-axis", "if plot: points = 200 zs = np.linspace(0, Lz, points) axis_zs = np.zeros((points,", "^ ^ ^ ^ ^ ^ ^ ^ ^ ^ | | |", "^ ^ ^ ^ | | | | | | | | |", "^ ^ ^ | | | | | | | | | |", "finmag.field import Field from finmag.energies import UniaxialAnisotropy, Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False):", "# systems that <NAME> is working on. boundary = Lz / 2.0 expr_a", "(A/Km)**0.5 # exchange length m unit_length = 1e-9 K1 = Km L =", "0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points = 200 zs =", "of the exchange spring # systems that <NAME> is working on. boundary =", "ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2],", "from finmag import Simulation from finmag.field import Field from finmag.energies import UniaxialAnisotropy, Exchange", "import os import numpy as np import matplotlib as mpl mpl.use('Agg') import pylab", "0) in the upper half. This is a toy model of the exchange", "= 10 Lx = nx * L ny = 1 Ly = ny", "as np import matplotlib as mpl mpl.use('Agg') import pylab import dolfin as df", "expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax()", "\"0\", \"x[2] <= b ? 
1 : 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh,", "13.0e-12 # exchange coupling strength J/m Km = 0.5 * mu0 * Ms**2", "Example with anisotropy vectors as follows: ----------------------------------- --> --> --> --> --> -->", "Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points", "Lz), nx, ny, nz) # Anisotropy easy axis is (0, 0, 1) in", "nz = 30 Lz = nz * L mesh = df.BoxMesh(df.Point(0, 0, 0),", "m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:, 2], \"-\", label=\"m_z\") ax.set_xlabel(\"z (nm)\") ax.legend(loc=\"upper left\")", "m_zs = np.zeros((points, 3)) # magnetisation probed along z-axis for i, z in", "| | | | | ----------------------------------- \"\"\" import os import numpy as np", "unit_length = 1e-9 K1 = Km L = lexch / unit_length nx =", "----------------------------------- \"\"\" import os import numpy as np import matplotlib as mpl mpl.use('Agg')", "* 10**-7 # vacuum permeability N/A^2 Ms = 1.0e6 # saturation magnetisation A/m", "= 0.5 * mu0 * Ms**2 # magnetostatic energy density scale kg/ms^2 lexch", "= 30 Lz = nz * L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx,", "= df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V, expr_a) sim = Simulation(mesh, Ms,", "--> --> ----------------------------------- ^ ^ ^ ^ ^ ^ ^ ^ ^ ^", "Ms**2 # magnetostatic energy density scale kg/ms^2 lexch = (A/Km)**0.5 # exchange length", "spatially varying anisotropy. 
Example with anisotropy vectors as follows: ----------------------------------- --> --> -->", "plt from finmag import Simulation from finmag.field import Field from finmag.energies import UniaxialAnisotropy,", "= 1 Ly = ny * L nz = 30 Lz = nz", "= lexch / unit_length nx = 10 Lx = nx * L ny", "----------------------------------- ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^", "/ unit_length nx = 10 Lx = nx * L ny = 1", "--> --> --> --> --> --> --> ----------------------------------- ^ ^ ^ ^ ^", "\"\"\" import os import numpy as np import matplotlib as mpl mpl.use('Agg') import", "follows: ----------------------------------- --> --> --> --> --> --> --> --> --> --> -->", "as df import matplotlib.pyplot as plt from finmag import Simulation from finmag.field import", "--> ----------------------------------- ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^", "1e-9 K1 = Km L = lexch / unit_length nx = 10 Lx", "| | | | | | | ----------------------------------- \"\"\" import os import numpy", "# exchange coupling strength J/m Km = 0.5 * mu0 * Ms**2 #", "fig.add_subplot(111) ax.plot(zs, axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:,", "toy model of the exchange spring # systems that <NAME> is working on.", "permeability N/A^2 Ms = 1.0e6 # saturation magnetisation A/m A = 13.0e-12 #", "--> --> --> ----------------------------------- ^ ^ ^ ^ ^ ^ ^ ^ ^", "Exchange MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 * np.pi * 10**-7", "on. boundary = Lz / 2.0 expr_a = df.Expression((\"x[2] <= b ? 0", "Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points =", "10**-7 # vacuum permeability N/A^2 Ms = 1.0e6 # saturation magnetisation A/m A", "the upper half. This is a toy model of the exchange spring #", "? 0 : 1\", \"0\", \"x[2] <= b ? 
1 : 0\"), b=boundary,", "----------------------------------- --> --> --> --> --> --> --> --> --> --> --> -->", "nx * L ny = 1 Ly = ny * L nz =", "3)) # magnetisation probed along z-axis for i, z in enumerate(zs): axis_zs[i] =", "<NAME> is working on. boundary = Lz / 2.0 expr_a = df.Expression((\"x[2] <=", "= a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax", "z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax = fig.add_subplot(111) ax.plot(zs,", "half of the film and # (1, 0, 0) in the upper half.", "df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length)", "30 Lz = nz * L mesh = df.BoxMesh(df.Point(0, 0, 0), df.Point(Lx, Ly,", "^ ^ ^ ^ ^ ^ ^ | | | | | |", "Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure() ax = fig.add_subplot(111)", "unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points = 200", "axis_zs[i] = a((Lx/2.0, Ly/2.0, z)) m_zs[i] = sim.m_field((Lx/2.0, Ly/2.0, z)) fig = plt.figure()", "= Field(V, expr_a) sim = Simulation(mesh, Ms, unit_length) sim.set_m((1, 0, 1)) sim.add(UniaxialAnisotropy(K1, a))", "points = 200 zs = np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3)) #", "0 : 1\", \"0\", \"x[2] <= b ? 1 : 0\"), b=boundary, degree=1)", "that <NAME> is working on. boundary = Lz / 2.0 expr_a = df.Expression((\"x[2]", "# (1, 0, 0) in the upper half. This is a toy model", "coupling strength J/m Km = 0.5 * mu0 * Ms**2 # magnetostatic energy", "= df.Expression((\"x[2] <= b ? 
0 : 1\", \"0\", \"x[2] <= b ?", "matplotlib as mpl mpl.use('Agg') import pylab import dolfin as df import matplotlib.pyplot as", "0.5 * mu0 * Ms**2 # magnetostatic energy density scale kg/ms^2 lexch =", "label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\", label=\"m_x\") ax.plot(zs, m_zs[:,", "np.zeros((points, 3)) # magnetisation probed along z-axis for i, z in enumerate(zs): axis_zs[i]", "Lz, points) axis_zs = np.zeros((points, 3)) # easy axis probed along z-axis m_zs", "0, 0) in the upper half. This is a toy model of the", "| | | | | | | | | | | ----------------------------------- \"\"\"", "is working on. boundary = Lz / 2.0 expr_a = df.Expression((\"x[2] <= b", "1)) sim.add(UniaxialAnisotropy(K1, a)) sim.add(Exchange(A)) sim.relax() if plot: points = 200 zs = np.linspace(0,", "\"x[2] <= b ? 1 : 0\"), b=boundary, degree=1) V = df.VectorFunctionSpace(mesh, \"DG\",", "mpl mpl.use('Agg') import pylab import dolfin as df import matplotlib.pyplot as plt from", "--> --> --> --> --> --> ----------------------------------- ^ ^ ^ ^ ^ ^", "import numpy as np import matplotlib as mpl mpl.use('Agg') import pylab import dolfin", "= 200 zs = np.linspace(0, Lz, points) axis_zs = np.zeros((points, 3)) # easy", "--> --> --> --> --> --> --> --> --> --> --> --> -->", "degree=1) V = df.VectorFunctionSpace(mesh, \"DG\", 0, dim=3) a = Field(V, expr_a) sim =", "os.path.dirname(os.path.abspath(__file__)) def run_simulation(plot=False): mu0 = 4.0 * np.pi * 10**-7 # vacuum permeability", "A/m A = 13.0e-12 # exchange coupling strength J/m Km = 0.5 *", "= nx * L ny = 1 Ly = ny * L nz", "axis_zs[:, 0], \"-o\", label=\"a_x\") ax.plot(zs, axis_zs[:, 2], \"-x\", label=\"a_z\") ax.plot(zs, m_zs[:, 0], \"-\",", "magnetisation A/m A = 13.0e-12 # exchange coupling strength J/m Km = 0.5" ]
[ "user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers', blank=True,", "validate_image UserModel = get_user_model() class Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture =", ") about = models.TextField( blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True )", "import get_user_model from django.db import models # Create your models here. from MetioTube.core.validators", "django.contrib.auth import get_user_model from django.db import models # Create your models here. from", "= get_user_model() class Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image',", "resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField( blank=True ) user = models.OneToOneField( UserModel,", "= models.TextField( blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers =", "import validate_image UserModel = get_user_model() class Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture", "username = models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about", "blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel,", "django.db import models # Create your models here. 
from MetioTube.core.validators import validate_image UserModel", "max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField( blank=True", "= models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about =", "CloudinaryField from django.contrib.auth import get_user_model from django.db import models # Create your models", "models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers', blank=True, ) def", "profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField( blank=True ) user", "Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) )", "MetioTube.core.validators import validate_image UserModel = get_user_model() class Profile(models.Model): username = models.CharField( max_length=30 )", "models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField(", "Create your models here. from MetioTube.core.validators import validate_image UserModel = get_user_model() class Profile(models.Model):", "from django.db import models # Create your models here. from MetioTube.core.validators import validate_image", "get_user_model() class Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True,", "UserModel = get_user_model() class Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture = CloudinaryField(", "here. 
from MetioTube.core.validators import validate_image UserModel = get_user_model() class Profile(models.Model): username = models.CharField(", "cloudinary.models import CloudinaryField from django.contrib.auth import get_user_model from django.db import models # Create", ") user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers',", "validators=(validate_image,) ) about = models.TextField( blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True", "= CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField( blank=True ) user =", "UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers', blank=True, ) def __str__(self):", "on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers', blank=True, ) def __str__(self): return", "from MetioTube.core.validators import validate_image UserModel = get_user_model() class Profile(models.Model): username = models.CharField( max_length=30", "import CloudinaryField from django.contrib.auth import get_user_model from django.db import models # Create your", "class Profile(models.Model): username = models.CharField( max_length=30 ) profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,)", "models.TextField( blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField(", "CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField( blank=True ) user = models.OneToOneField(", "from django.contrib.auth import get_user_model from django.db import models # Create your models here.", "models # Create your models here. 
from MetioTube.core.validators import validate_image UserModel = get_user_model()", ") profile_picture = CloudinaryField( resource_type='image', blank=True, validators=(validate_image,) ) about = models.TextField( blank=True )", "primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers', blank=True, ) def __str__(self): return self.username", "models here. from MetioTube.core.validators import validate_image UserModel = get_user_model() class Profile(models.Model): username =", "from cloudinary.models import CloudinaryField from django.contrib.auth import get_user_model from django.db import models #", "import models # Create your models here. from MetioTube.core.validators import validate_image UserModel =", "blank=True, validators=(validate_image,) ) about = models.TextField( blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE,", "= models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers = models.ManyToManyField( UserModel, related_name='subscribers', blank=True, )", "about = models.TextField( blank=True ) user = models.OneToOneField( UserModel, on_delete=models.CASCADE, primary_key=True ) subscribers", "your models here. from MetioTube.core.validators import validate_image UserModel = get_user_model() class Profile(models.Model): username", "# Create your models here. from MetioTube.core.validators import validate_image UserModel = get_user_model() class", "get_user_model from django.db import models # Create your models here. from MetioTube.core.validators import" ]
[ "in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close()", "'\\n': continue fl_list = fl.split() gene = fl_list[c-1] if g == 'T': gene", "prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output file in fasta", "'Csa' not in fl: continue elif fl == '\\n': continue fl_list = fl.split()", "= fl.split() gene = fl_list[c-1] if g == 'T': gene = gene.split('.')[0] gene", "in fl: fl_list = fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] start =", "start, end] return D def read_id(infile, c=1, g='F'): gene_list = [] with open(infile)", "1', type=int, default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default is FALSE'\", default='F',", "type=int, default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default is FALSE'\", default='F', choices=['T',", "def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list:", "'T': gene = gene.split('.')[0] gene = gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return gene_list", "gene id, default is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111',", "'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile): D={}", "gene id as 'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args()", "id as 'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args() def", "gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D = 
pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3')", "= gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict =", "== '\\n': continue fl_list = fl.split() gene = fl_list[c-1] if g == 'T':", "import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by gene id', prog='SeqGeter', usage='PROG [options]')", "gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl)", "for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq)", "parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default", "with open(infile) as f: for fl in f: if 'gene' in fl: fl_list", "= gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D =", "seq by gene id import argparse from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive", "for genome seq by gene id import argparse from Bio import SeqIO parser", "Linxzh # retrive gene seq for genome seq by gene id import argparse", "outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq =", "parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F']) args", "continue elif 'Csa' not in fl: continue elif fl == '\\n': continue fl_list", "sequence by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o',", "in f: if '#' in fl: continue elif 'Csa' not in fl: continue", 
"usage='PROG [options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output file in fasta format',", "__name__ == '__main__': gene_list = read_id(args.i, c = args.c, g = args.g) get_seq(gene_list,", "= int(fl_list[4]) D[geneid] = [chrom, start, end] return D def read_id(infile, c=1, g='F'):", "gene = gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D", "fl == '\\n': continue fl_list = fl.split() gene = fl_list[c-1] if g ==", "% (gene, seq) outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list = read_id(args.i, c", "default is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default is", "seq) outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list = read_id(args.i, c = args.c,", "Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by gene id', prog='SeqGeter', usage='PROG", "ids') parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id,", "fl_list[c-1] if g == 'T': gene = gene.split('.')[0] gene = gene.replace('P','G') gene =", "def read_id(infile, c=1, g='F'): gene_list = [] with open(infile) as f: for fl", "with open(infile) as f: for fl in f: if '#' in fl: continue", "as f: for fl in f: if '#' in fl: continue elif 'Csa'", "of gene id, default is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene id as", "gene.split('.')[0] gene = gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile):", "outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list = read_id(args.i, c = args.c, g", "D[geneid] = [chrom, start, end] return D def read_id(infile, c=1, g='F'): gene_list =", "'gene' in fl: fl_list = fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] 
start", "fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default is 1', type=int, default=1)", "gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output file", "f: if 'gene' in fl: fl_list = fl.split() chrom = fl_list[0] geneid =", "contains gene ids') parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of", "fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n'", "seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if __name__", "fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid]", "gene = fl_list[c-1] if g == 'T': gene = gene.split('.')[0] gene = gene.replace('P','G')", "gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta'))", "FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile): D={} with open(infile) as", "retrive gene seq for genome seq by gene id import argparse from Bio", "[options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w'))", "elif 'Csa' not in fl: continue elif fl == '\\n': continue fl_list =", "gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if", "for fl in f: if '#' in fl: 
continue elif 'Csa' not in", "pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl =", "if 'gene' in fl: fl_list = fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:]", "[chrom, start, end] return D def read_id(infile, c=1, g='F'): gene_list = [] with", "default is FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile): D={} with", "f: if '#' in fl: continue elif 'Csa' not in fl: continue elif", "help='file contains gene ids') parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column", "= parser.parse_args() def pos_dict(infile): D={} with open(infile) as f: for fl in f:", "'#' in fl: continue elif 'Csa' not in fl: continue elif fl ==", "<reponame>l0o0/bio-analysis-kit #!/bin/python # 2014-11-4 Linxzh # retrive gene seq for genome seq by", "as f: for fl in f: if 'gene' in fl: fl_list = fl.split()", "as 'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile):", "args = parser.parse_args() def pos_dict(infile): D={} with open(infile) as f: for fl in", "int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid] = [chrom, start, end] return D", "'F']) args = parser.parse_args() def pos_dict(infile): D={} with open(infile) as f: for fl", "gene ids') parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene", "by gene id import argparse from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene", "open(infile) as f: for fl in f: if '#' in fl: continue elif", "int(fl_list[4]) D[geneid] = [chrom, start, end] return D def read_id(infile, c=1, g='F'): gene_list", "g == 'T': 
gene = gene.split('.')[0] gene = gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene)", "read_id(infile, c=1, g='F'): gene_list = [] with open(infile) as f: for fl in", "def pos_dict(infile): D={} with open(infile) as f: for fl in f: if 'gene'", "get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq", "= pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl", "= fl_list[c-1] if g == 'T': gene = gene.split('.')[0] gene = gene.replace('P','G') gene", "f: for fl in f: if 'gene' in fl: fl_list = fl.split() chrom", "= fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end = int(fl_list[4])", "end = int(fl_list[4]) D[geneid] = [chrom, start, end] return D def read_id(infile, c=1,", "chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end =", "return gene_list def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene", "wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list", "D def read_id(infile, c=1, g='F'): gene_list = [] with open(infile) as f: for", "SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene,", "# retrive gene seq for genome seq by gene id import 
argparse from", "parser.parse_args() def pos_dict(infile): D={} with open(infile) as f: for fl in f: if", "D={} with open(infile) as f: for fl in f: if 'gene' in fl:", "file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default is 1',", "- 1 end = int(fl_list[4]) D[geneid] = [chrom, start, end] return D def", "gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for", "pos_dict(infile): D={} with open(infile) as f: for fl in f: if 'gene' in", "(gene, seq) outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list = read_id(args.i, c =", "id, default is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default", "#!/bin/python # 2014-11-4 Linxzh # retrive gene seq for genome seq by gene", "= fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid] = [chrom,", "gene_list def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in", "fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid] = [chrom, start,", "gene = gene.split('.')[0] gene = gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return gene_list def", "fl: fl_list = fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3])", "return D def read_id(infile, c=1, g='F'): gene_list = [] with open(infile) as f:", "default='F', choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile): D={} with open(infile) as f:", "= gene.split('.')[0] gene = gene.replace('P','G') gene = 
gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list,", "help=\"format gene id as 'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F']) args =", "if '#' in fl: continue elif 'Csa' not in fl: continue elif fl", "gene = gene.replace('M','G') gene_list.append(gene) return gene_list def get_seq(gene_list, outfile): D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict", "= argparse.ArgumentParser(description='Retrive gene sequence by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains", "g='F'): gene_list = [] with open(infile) as f: for fl in f: if", "argparse from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by gene id',", "in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default is 1', type=int,", "type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default is 1', type=int, default=1) parser.add_argument('-g', help=\"format", "= [] with open(infile) as f: for fl in f: if '#' in", "f: for fl in f: if '#' in fl: continue elif 'Csa' not", "== 'T': gene = gene.split('.')[0] gene = gene.replace('P','G') gene = gene.replace('M','G') gene_list.append(gene) return", "help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default is", "if g == 'T': gene = gene.split('.')[0] gene = gene.replace('P','G') gene = gene.replace('M','G')", "= fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1", "== '__main__': gene_list = read_id(args.i, c = args.c, g = args.g) get_seq(gene_list, args.o)", "for fl in f: if 'gene' in fl: fl_list = fl.split() chrom =", "fl_list = fl.split() gene = fl_list[c-1] if g == 'T': gene = gene.split('.')[0]", "from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by gene id', 
prog='SeqGeter',", "[] with open(infile) as f: for fl in f: if '#' in fl:", "seq for genome seq by gene id import argparse from Bio import SeqIO", "help='column of gene id, default is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene id", "str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if __name__ == '__main__':", "in fl: continue elif fl == '\\n': continue fl_list = fl.split() gene =", "fl.split() gene = fl_list[c-1] if g == 'T': gene = gene.split('.')[0] gene =", "fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end", "fl in f: if '#' in fl: continue elif 'Csa' not in fl:", "parser = argparse.ArgumentParser(description='Retrive gene sequence by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file", "continue elif fl == '\\n': continue fl_list = fl.split() gene = fl_list[c-1] if", "# 2014-11-4 Linxzh # retrive gene seq for genome seq by gene id", "in fl: continue elif 'Csa' not in fl: continue elif fl == '\\n':", "fl: continue elif fl == '\\n': continue fl_list = fl.split() gene = fl_list[c-1]", "end] return D def read_id(infile, c=1, g='F'): gene_list = [] with open(infile) as", "format', type=argparse.FileType('w')) parser.add_argument('-c', help='column of gene id, default is 1', type=int, default=1) parser.add_argument('-g',", "import argparse from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by gene", "= [chrom, start, end] return D def read_id(infile, c=1, g='F'): gene_list = []", "2014-11-4 Linxzh # retrive gene seq for genome seq by gene id import", "id import argparse from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by", "genome seq by gene id import argparse from Bio import SeqIO parser =", "gene sequence by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains 
gene ids')", "is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default is FALSE'\",", "in f: if 'gene' in fl: fl_list = fl.split() chrom = fl_list[0] geneid", "fl: continue elif 'Csa' not in fl: continue elif fl == '\\n': continue", "argparse.ArgumentParser(description='Retrive gene sequence by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains gene", "1 end = int(fl_list[4]) D[geneid] = [chrom, start, end] return D def read_id(infile,", "parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output file in fasta format', type=argparse.FileType('w')) parser.add_argument('-c',", "= SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' %", "fl in f: if 'gene' in fl: fl_list = fl.split() chrom = fl_list[0]", "default=1) parser.add_argument('-g', help=\"format gene id as 'Csa1G000111', default is FALSE'\", default='F', choices=['T', 'F'])", "open(infile) as f: for fl in f: if 'gene' in fl: fl_list =", "gene id import argparse from Bio import SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence", "elif fl == '\\n': continue fl_list = fl.split() gene = fl_list[c-1] if g", "id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output file in", "by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i', help='file contains gene ids') parser.add_argument('-o', help='output", "continue fl_list = fl.split() gene = fl_list[c-1] if g == 'T': gene =", "gene_list = [] with open(infile) as f: for fl in f: if '#'", "SeqIO parser = argparse.ArgumentParser(description='Retrive gene sequence by gene id', prog='SeqGeter', usage='PROG [options]') parser.add_argument('-i',", "if 
__name__ == '__main__': gene_list = read_id(args.i, c = args.c, g = args.g)", "is FALSE'\", default='F', choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile): D={} with open(infile)", "fl_list = fl.split() chrom = fl_list[0] geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) -", "'>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list = read_id(args.i,", "parser.add_argument('-c', help='column of gene id, default is 1', type=int, default=1) parser.add_argument('-g', help=\"format gene", "outfile.close() if __name__ == '__main__': gene_list = read_id(args.i, c = args.c, g =", "gene seq for genome seq by gene id import argparse from Bio import", "= '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if __name__ == '__main__': gene_list =", "= str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq) wl = '>%s\\n%s\\n' % (gene, seq) outfile.write(wl) outfile.close() if __name__ ==", "choices=['T', 'F']) args = parser.parse_args() def pos_dict(infile): D={} with open(infile) as f: for", "= int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid] = [chrom, start, end] return", "c=1, g='F'): gene_list = [] with open(infile) as f: for fl in f:", "D = pos_dict('/share/fg3/Linxzh/Data/Cucumber_ref/Cucumber_20101104.gff3') fa_dict = SeqIO.to_dict(SeqIO.parse('/share/fg3/Linxzh/Data/Cucumber_ref/whole_genome/origin/domestic_Chr_20101102.fa','fasta')) for gene in gene_list: seq = str(fa_dict[D[gene][0]][D[gene][1]:D[gene][2]].seq)", "not in fl: continue elif fl == '\\n': continue fl_list = fl.split() gene", "start = int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid] = [chrom, start, end]", "geneid = fl_list[8].split(';')[0][3:] start = int(fl_list[3]) - 1 end = int(fl_list[4]) D[geneid] =" ]
[ "settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response", "api_response return info except ApiException as e: return None def get_team_events(teamkey): configuration =", "tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return info except", "configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info", "tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey))", "= tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return info", "str(teamkey)) info = api_response return info except ApiException as e: return None def", "= settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info =", "= api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return info except ApiException as e:", "None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try:", "api_response = api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return info except ApiException as", "tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = 
settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey))", "= tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return info", "= api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return info except ApiException as e:", "= tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" +", "settings import tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key']", "= tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" +", "e: return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance =", "api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return info except ApiException as e: return", "api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return", "api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return info except ApiException as e: return", "from django.conf import settings import tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration", "ApiException as e: return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() 
configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY", "def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response", "api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return info except ApiException as", "import tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] =", "try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info = api_response return info except ApiException", "configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info", "try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return info except ApiException", "import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration))", "except ApiException as e: return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] =", "as e: return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance", "django.conf import settings import tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration =", "configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = 
tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\"", "return info except ApiException as e: return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration()", "= api_response return info except ApiException as e: return None def get_team_events(teamkey): configuration", "settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info = api_response", "api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return", "tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\" + str(teamkey)) info = api_response return info except", "get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response =", "ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try:", "<gh_stars>1-10 from django.conf import settings import tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey):", "tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY", "return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration))", 
"import settings import tbaapiv3client from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration()", "get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response =", "= settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team(\"frc\" + str(teamkey)) info =", "tbaapiv3client.rest import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance =", "def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response", "configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration)) try: api_response = api_instance.get_team_events(\"frc\"", "info except ApiException as e: return None def get_team_events(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key']", "+ str(teamkey)) info = api_response return info except ApiException as e: return None", "info = api_response return info except ApiException as e: return None def get_team_events(teamkey):", "from tbaapiv3client.rest import ApiException def get_team(teamkey): configuration = tbaapiv3client.Configuration() configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY api_instance" ]
[ "dates = [np.datetime64(x) for x in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif", "from .test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates =", "print('test: ',test) print('exp: ',expected) test = test.values if mytype=='xda' else test assert np.all(test==expected)", "for x in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates =", "',test) print('exp: ',expected) test = test.values if mytype=='xda' else test assert np.all(test==expected) def", "',expected) test = test.values if mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make", "mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates)", "in dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in dates] if", "import warnings from datetime import datetime import numpy as np import xarray as", "test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test = test.values if mytype=='xda' else", "xarray as xr import pytest import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single'])", "np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates))", "warnings from datetime import datetime import numpy as np import xarray as xr", "get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x", "dates = 
[datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates = np.array(dates,dtype='datetime64[s]') dates =", "@pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in", "test_get_grid(get_test_ds): \"\"\"make sure we can make a grid ... that's it\"\"\" grid =", "zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test = test.values if mytype=='xda' else test assert", ".test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2],", "for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test = test.values if mytype=='xda'", "if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0]", "dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test)", "elif mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0] test_out =", "hour=x[3],minute=x[4],second=x[5]) for x in dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x", "in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test = test.values if mytype=='xda' else test", "xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0] test_out", "as np import xarray as xr import pytest import ecco_v4_py from .test_common import", "dates = xr.DataArray(np.array(dates)) elif 
mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0] dates =", "test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can make a grid ...", "np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in", "print('exp: ',expected) test = test.values if mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds):", "datetime import numpy as np import xarray as xr import pytest import ecco_v4_py", "mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can make a", "= np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in dates] if mytype=='xda': dates =", "dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test", "x in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates)", "np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can make a grid ... 
that's it\"\"\"", "import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5])", "import xarray as xr import pytest import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds", "if mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can make", "import numpy as np import xarray as xr import pytest import ecco_v4_py from", "dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates =", "test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test =", "= ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test = test.values", "mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0] dates", "mytype=='single': dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test:", "dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates) elif mytype=='single':", "else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can make a grid", "def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints]", "\"\"\"make sure we can make a grid ... 
that's it\"\"\" grid = ecco_v4_py.get_llc_grid(get_test_ds)", "dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in dates] if mytype=='xda':", "= xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0]", "test.values if mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can", "numpy as np import xarray as xr import pytest import ecco_v4_py from .test_common", "all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for", "x in dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in dates]", "dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in dates] if mytype=='xda': dates", "dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp:", "= test.values if mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we", "assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure we can make a grid ... 
that's", "import pytest import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints", "import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]]", "= [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x)", "= dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected)", "pytest import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints =", "= np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected", "= [np.datetime64(x) for x in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr':", "in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates = np.array(dates) elif", "dates = np.array(dates) elif mytype=='single': dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for", "as xr import pytest import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds @pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def", "ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T): print('test: ',test) print('exp: ',expected) test = test.values if", "ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds 
@pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates", "elif mytype=='single': dints=dints[0] dates = dates[0] test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates) for test,expected in zip(test_out,np.array(dints).T):", "[[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates = np.array(dates,dtype='datetime64[s]') dates", "[np.datetime64(x) for x in dates] if mytype=='xda': dates = xr.DataArray(np.array(dates)) elif mytype=='nparr': dates", "import datetime import numpy as np import xarray as xr import pytest import", "datetime import datetime import numpy as np import xarray as xr import pytest", "def test_get_grid(get_test_ds): \"\"\"make sure we can make a grid ... that's it\"\"\" grid", "np import xarray as xr import pytest import ecco_v4_py from .test_common import all_mds_datadirs,", "= [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates = np.array(dates,dtype='datetime64[s]')", "[datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for", "test = test.values if mytype=='xda' else test assert np.all(test==expected) def test_get_grid(get_test_ds): \"\"\"make sure", "test_extract_dates(mytype): dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]] dates = [datetime(year=x[0],month=x[1],day=x[2], hour=x[3],minute=x[4],second=x[5]) for x in dints] dates", "from datetime import datetime import numpy as np import xarray as xr import", "for x in dints] dates = np.array(dates,dtype='datetime64[s]') dates = [np.datetime64(x) for x in", "xr import pytest import ecco_v4_py from .test_common import all_mds_datadirs, get_test_ds 
@pytest.mark.parametrize(\"mytype\",['xda','nparr','list','single']) def test_extract_dates(mytype):" ]
[ "import PromptToolkitSession, PromptToolkitSSHServer # __all__ = [ # \"PromptToolkitSession\", # \"PromptToolkitSSHServer\", # ]", ".server import PromptToolkitSession, PromptToolkitSSHServer # __all__ = [ # \"PromptToolkitSession\", # \"PromptToolkitSSHServer\", #", "# from .server import PromptToolkitSession, PromptToolkitSSHServer # __all__ = [ # \"PromptToolkitSession\", #", "from .server import PromptToolkitSession, PromptToolkitSSHServer # __all__ = [ # \"PromptToolkitSession\", # \"PromptToolkitSSHServer\"," ]
[ "'C9300 48P DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network", "DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48", "'license_usage':{ 'C9300 48P DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P", "}, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P", "}, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA Advantage', 'count':'1', 'status':'AUTHORIZED' } } }", "{ 'license_usage':{ 'C9300 48P DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300", "Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1',", "}, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P", "Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED'", "= { 'license_usage':{ 'C9300 48P DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' },", "expected_output = { 'license_usage':{ 'C9300 48P DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED'", "'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300", "'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...',", "'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA", "'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network 
Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network", "Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network", "'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...',", "Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED'", "'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' },", "Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network", "Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24", "'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA Advantage', 'count':'1', 'status':'AUTHORIZED' } }", "DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2',", "48P Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{", "48P DNA Advantage':{ 'entitlement':'C9300-48 DNA Advantage', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 48P Network Adv...':{", "Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA Advantage', 'count':'1', 'status':'AUTHORIZED'", "'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300", "'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA Advantage', 'count':'1', 'status':'AUTHORIZED' }", "24P Network 
Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{", "'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA Advantage',", "Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA", "'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24 Network Advan...', 'count':'1', 'status':'AUTHORIZED' },", "Network Advan...', 'count':'1', 'status':'AUTHORIZED' }, 'C9300 24P DNA Advantage':{ 'entitlement':'C9300-24 DNA Advantage', 'count':'1',", "Network Adv...':{ 'entitlement':'C9300-48 Network Advan...', 'count':'2', 'status':'AUTHORIZED' }, 'C9300 24P Network Adv...':{ 'entitlement':'C9300-24" ]
[ "osp import policy_generator as pg import matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({", "model = UserModel.UserModel() return model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy()", "get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests =", "file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path():", "True, 'pgf.rcfonts': False, }) import numpy as np def module_from_file(module_name, file_path): spec =", "Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list,", "tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state],", "def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\":", "example_path = get_example_path() config_filename = get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename)", "for i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color", 
"User Model model = get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict", "90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = [] f = plt.figure()", "osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event files uploaded!')", "Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path =", "file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list =", "range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color = color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key))", "if events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\",", "osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel", "as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, })", "policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path) # Read Config", "= 
get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path) ##########################################################################################", "else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if", "color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = [] f = plt.figure() f.set_figwidth(15)", "enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color = color_dict[key], linestyle", "locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file", "module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt')", "get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File Names", "events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x)", "for j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], 
color = color_dict[key], linestyle =", "if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename)", "get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename)", "'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import numpy as np def module_from_file(module_name, file_path):", "UserModel.UserModel() return model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list,", "= get_example_path() config_filename = get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename,", "locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path)", "inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel()", "importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath", "as pg import matplotlib 
import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family':", "matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import numpy", "numpy as np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec)", "as np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module)", "which contain interactions and event details for a time step interactions_files_list=None events_files_list=None if", "{(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for", "color_dict.keys(): i,j = key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection,", "tdict[state][j] /= 1000 plt.plot(tdict[state], color = color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close()", "agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename", "x : osp.join(example_path,x) 
,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list def", "uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted')", "total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state", "/= 1000 plt.plot(tdict[state], color = color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper", "linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\") plt.ylabel(\"Population", "Reading through a file (for interactions/events) that contain file names which contain interactions", "i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \")", "get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list)", "get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def get_policy(example_path): Generate_policy", "sys.argv[1] 
def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename)", "locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through", "time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda", "using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj)", "config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None", "matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import numpy as", "= world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j", "in color_dict.keys(): i,j = key 
policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict,", "pattern = ['dashed','solid','dotted'] tdicts = [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key", "step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x", "= key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days,", "that contain file names which contain interactions and event details for a time", "= {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8)", "get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern =", "\"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import numpy as np def", "def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if 
config_obj.locations_filename==\"\": locations_filename=None else:", "import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts':", "return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for", "print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]:", "[] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j = key", "matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True,", "get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename =", "event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path) #", "i,j = key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days,", "interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = 
get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model", "config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if", ",eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel =", "return interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return", "# File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return", "a file (for interactions/events) that contain file names which contain interactions and event", "for a time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else:", "}) import numpy as np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module", "spec.loader.exec_module(module) return module def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def", "policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) 
world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost =", "interactions/events) that contain file names which contain interactions and event details for a", "module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py'))", "1000 plt.plot(tdict[state], color = color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right',", "module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def", "plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j = key policy_list, event_restriction_fn =", "fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts", "def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def get_policy(example_path):", "False, }) import numpy as np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path)", ": osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list def get_model(example_path):", "contain interactions and event details for a time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='':", 
"interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model", "model = get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'}", "f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j = key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests,", "for key in color_dict.keys(): i,j = key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j)", "get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path", "import numpy as np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module =", "__name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration", "events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for interactions/events) that contain", "# User Model model = get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90", "num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = [] f", 
"interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda", "interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x :", "= open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts =", "osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename =", "files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events", "interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def", "pg import matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif',", "world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, 
total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict)", "= pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\") plt.ylabel(\"Population proportion\")", "return model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn", "i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color =", "interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path) ########################################################################################## fp =", "pickle import World import importlib.util import os.path as osp import policy_generator as pg", "\"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import numpy as np", "in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color = color_dict[key],", "import pickle import World import importlib.util import os.path as osp import policy_generator as", "import World import importlib.util import os.path as osp import policy_generator as pg import", "= importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path(): return sys.argv[1]", "np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return", "= 
importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return", ": osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event files", "locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename,", "agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for interactions/events)", "inputted') if config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x)", "= pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\")", "details for a time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!')", "j in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color = 
color_dict[key], linestyle = pattern[i],", "import sys import ReadFile import pickle import World import importlib.util import os.path as", ",interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event files uploaded!') else:", "= module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path()", "open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = []", "=state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\") plt.ylabel(\"Population proportion\") # plt.show() plt.savefig('multi_SIR.pgf')", "policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path)", "else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a", "module def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): #", "events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path) ########################################################################################## 
fp = open(\"multi_SIR.txt\",\"w\")", "Model model = get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict =", "config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading", "get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for interactions/events) that contain file names which", "names which contain interactions and event details for a time step interactions_files_list=None events_files_list=None", "return module def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj):", "config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if", "agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model", "Events inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model =", 
"f.set_figheight(8) for key in color_dict.keys(): i,j = key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i,", "color = color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12,", ": \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j]", "total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for", "'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import numpy as np def module_from_file(module_name,", "importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path(): return sys.argv[1] def", "and event details for a time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction", "osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list,", "Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename,", "eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if 
events_files_list==[]: print('No Events inputted') return interactions_files_list,", "event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path) # Read Config file", "events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj):", "plt.plot(tdict[state], color = color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True,", "events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model =", "events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py'))", "interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for interactions/events) that", "print('No Events inputted') return interactions_files_list, events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model", "import policy_generator as pg import matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\":", "event details for a time step interactions_files_list=None events_files_list=None 
if config_obj.interactions_files_list=='': print('No Interaction files", "tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for", "import ReadFile import pickle import World import importlib.util import os.path as osp import", "through a file (for interactions/events) that contain file names which contain interactions and", "return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None", "a time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename)", "j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\")", "(for interactions/events) that contain file names which contain interactions and event details for", "<reponame>suryadheeshjith/episimmer import sys import ReadFile import pickle import World import importlib.util import os.path", "interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='':", "= module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def get_policy(example_path): Generate_policy = 
module_from_file(\"Generate_policy\",", "config_filename = get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename,", "model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if", "events_files_list def get_model(example_path): UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def", "else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted') return", "event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False)", "= ['dashed','solid','dotted'] tdicts = [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in", "Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list", "def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) 
interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list)", "as osp import policy_generator as pg import matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\")", "Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No", "print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x", "'text.usetex': True, 'pgf.rcfonts': False, }) import numpy as np def module_from_file(module_name, file_path): spec", "interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No", "File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename,", "color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\")", "# Reading through a file (for interactions/events) that contain file names which contain", "matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False,", "file names which contain interactions and event details for a time step interactions_files_list=None", "interactions and 
event details for a time step interactions_files_list=None events_files_list=None if config_obj.interactions_files_list=='': print('No", "# Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj)", "key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost", "x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted') if config_obj.events_files_list=='': print('No Event", "contain file names which contain interactions and event details for a time step", "= UserModel.UserModel() return model def get_policy(example_path): Generate_policy = module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return", "= get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename", "events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]: print('No Events inputted') return interactions_files_list, events_files_list", "ReadFile import pickle import World import importlib.util import os.path as osp import policy_generator", "get_example_path() config_filename = get_config_path(example_path) # Read Config file using ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, 
interactions_FilesList_filename,\\", "['dashed','solid','dotted'] tdicts = [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys():", "return config_filepath def get_file_paths(example_path,config_obj): # File Names locations_filename=None agents_filename=osp.join(example_path,config_obj.agents_filename) interactions_FilesList_filename=osp.join(example_path,config_obj.interactions_files_list) events_FilesList_filename=osp.join(example_path,config_obj.events_files_list) if config_obj.locations_filename==\"\":", "print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list)) if events_files_list==[]:", "wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()):", "Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No", "key in color_dict.keys(): i,j = key policy_list, event_restriction_fn = pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list)", "if config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x : osp.join(example_path,x) ,eventFiles_obj.file_list))", "in range(len(tdict[state])): tdict[state][j] /= 1000 plt.plot(tdict[state], color = color_dict[key], linestyle = pattern[i], label", "def 
get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for interactions/events) that contain file names", "if config_obj.locations_filename==\"\": locations_filename=None else: locations_filename=osp.join(example_path,config_obj.locations_filename) return agents_filename, interactions_FilesList_filename, events_FilesList_filename, locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): #", "\") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /=", "= [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j =", "def get_example_path(): return sys.argv[1] def get_config_path(path): config_filepath=osp.join(path,'config.txt') return config_filepath def get_file_paths(example_path,config_obj): # File", "if __name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path) # Read Config file using", "import matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex':", "config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User", "tdicts = [] f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j", "World import importlib.util import os.path as osp import policy_generator as pg import matplotlib", "total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") 
: \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in", "label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\") plt.ylabel(\"Population proportion\") # plt.show()", "UserModel = module_from_file(\"Generate_model\", osp.join(example_path,'UserModel.py')) model = UserModel.UserModel() return model def get_policy(example_path): Generate_policy =", "import importlib.util import os.path as osp import policy_generator as pg import matplotlib import", "get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path) ########################################################################################## fp", "pg.generate_group_testing_tests_policy(num_tests, i, j) world_obj=World.World(config_obj,model,policy_list,event_restriction_fn,agents_filename,interactions_files_list,locations_filename,events_files_list) tdict, total_infection, total_quarantined_days, wrongly_quarantined_days, total_test_cost = world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") :", "if config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list))", "importlib.util import os.path as osp import policy_generator as pg import matplotlib import matplotlib.pyplot", "os.path as osp import policy_generator as pg import matplotlib import matplotlib.pyplot as plt", "policy_generator as pg import matplotlib import matplotlib.pyplot as plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\",", "file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return 
module def get_example_path(): return sys.argv[1] def get_config_path(path):", "config_obj.interactions_files_list=='': print('No Interaction files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if", "plt matplotlib.use(\"pgf\") matplotlib.rcParams.update({ \"pgf.texsystem\": \"pdflatex\", 'font.family': 'serif', 'text.usetex': True, 'pgf.rcfonts': False, }) import", "'pgf.rcfonts': False, }) import numpy as np def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name,", "fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])):", "return policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename = get_config_path(example_path) # Read", "ReadFile.ReadConfiguration config_obj=ReadFile.ReadConfiguration(config_filename) agents_filename, interactions_FilesList_filename,\\ events_FilesList_filename, locations_filename = get_file_paths(example_path,config_obj) interactions_files_list, events_files_list = get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) #", "= get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj) # User Model model = get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests", "uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions inputted')", "sys import ReadFile import pickle import World import 
importlib.util import os.path as osp", "world_obj.simulate_worlds(plot=False) fp.write(\"(\"+str(i)+\",\"+str(j)+\") : \") fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j in", "file (for interactions/events) that contain file names which contain interactions and event details", "fp.write(str(tdict)+\"\\n\") tdicts.append(tdict) for i,state in enumerate(tdict.keys()): for j in range(len(tdict[state])): tdict[state][j] /= 1000", "= get_model(example_path) ########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern", "f = plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j = key policy_list,", "spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module def get_example_path(): return", "= color_dict[key], linestyle = pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1))", "= plt.figure() f.set_figwidth(15) f.set_figheight(8) for key in color_dict.keys(): i,j = key policy_list, event_restriction_fn", "fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\") plt.ylabel(\"Population proportion\") # plt.show() plt.savefig('multi_SIR.pgf') ###############################################################################################", "files uploaded!') else: interactionFiles_obj=ReadFile.ReadFilesList(interactions_FilesList_filename) interactions_files_list=list(map(lambda x : osp.join(example_path,x) ,interactionFiles_obj.file_list)) if interactions_files_list==[]: print('No Interactions", "import os.path as osp import policy_generator as pg import matplotlib import 
matplotlib.pyplot as", "module_from_file(\"Generate_policy\", osp.join(example_path,'Generate_policy.py')) policy_list, event_restriction_fn=Generate_policy.generate_policy() return policy_list, event_restriction_fn if __name__==\"__main__\": example_path = get_example_path() config_filename", "locations_filename def get_file_names_list(example_path,interactions_FilesList_filename,events_FilesList_filename,config_obj): # Reading through a file (for interactions/events) that contain file", "pattern[i], label =state+\"_\"+str(key)) fp.close() plt.legend(loc='upper right', shadow=True, bbox_to_anchor=(1.12, 1)) plt.xlabel(\"Timesteps\") plt.ylabel(\"Population proportion\") #", "def module_from_file(module_name, file_path): spec = importlib.util.spec_from_file_location(module_name, file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module", "= 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted'] tdicts = [] f =", "########################################################################################## fp = open(\"multi_SIR.txt\",\"w\") num_tests = 90 color_dict = {(1,1):'cyan',(2,1):'blue',(3,2):'grey',(4,2):'pink',(5,2):'orange',(5,3):'red',(6,2):'purple',(6,3):'green'} pattern = ['dashed','solid','dotted']", "Interactions inputted') if config_obj.events_files_list=='': print('No Event files uploaded!') else: eventFiles_obj=ReadFile.ReadFilesList(events_FilesList_filename) events_files_list=list(map(lambda x :" ]
[ ":type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self, name):", "something on the message stack :param code: :return: str \"\"\" if len(self.message_stack) >", "# pylint: disable=too-many-arguments def create_item(self, name, image_file, x, y, m, ph, r, e,", "return None else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a task in", "21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal method to process returned vision repsonse.", "math.pow(nx, 2)) * -1 elif adjusted < -270: ny = math.cos(radjusted * -1", "code=\"\", block=False): \"\"\" Gets messages from the socket. If code is blank, then", "270 or adjusted == -90: return (-1 * z), 0 else: if adjusted", "to the left at 90 degrees and if absolute is False, he'll be", "return int(response[1]) == 1 def reset_agent(self): \"\"\" Resets agent state back to a", "a vector of force to the hand moving it :param x: :type x:", "for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\",", "def send_force(self, x, y, absolute=False): \"\"\" Sends a vector of force to the", "not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the message '%s'\" % message)", "> 0: if code != \"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] ==", "if self.pagi_socket is None: raise RuntimeError(\"No open socket. Use connect() to open a", "either code is blank or it matches something on the message stack :param", "We additionally save the task file name so we can reset things if", "y_coord, description=None): \"\"\" Creates an item and drops into into PAGIworld. 
These items", "PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self,", "pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in a valid PagiWorld", "= math.acos(z / x) * 180 / math.pi + 90 else: if y", "If block is set to true, no exception will be thrown, but program", "we rotate to position specified from 0 (looking up), otherwise rotate him relative", "matches something on the message stack :param code: :return: str \"\"\" if len(self.message_stack)", ":return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's rotation back to 0", "self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set a state within", "to prevent bad calls. :param message: :type message: str :return: :raises: RuntimeError \"\"\"", "length of specified column_length. :param response: :param column_length: :return: \"\"\" vision = list()", "setting it's duration to zero (so that can't ever really be in a", "\"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper()", "just use connect directly without creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def", "== \"addForce\" and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message", "= math.acos(z / x) * 180 / math.pi + 270 adjusted = rotation", "bottom edge is touching something solid, otherwise he'll do nothing. :return: bool True", "Sends a vector force to the agent to move his body. If absolute", "def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in a", "Sets a reflex in PAGIworld to be carried out on conditions. 
:param name:", "\"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\",", "in PAGIworld. We additionally save the task file name so we can reset", "self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld", "command == \"sensorRequest\" and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in", "RuntimeError \"\"\" if self.__task_file == \"\" or self.__task_file is None: raise RuntimeError(\"Cannot reset", "\"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states from", "val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation in either degrees (0 -", "into a list of lists where each inner list is the length of", "= math.sqrt(ax ** 2 + ay ** 2) angle = math.acos(ay / hyp)", "vector force to the agent to move his body. If absolute is False,", "message to the socket. We make sure that the message is a valid", "If block is set to False, and there's no response from the socket,", "or self.__task_file is None: raise RuntimeError(\"Cannot reset task, no previous task file found\")", "prevent bad calls. 
:param message: :type message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket()", "ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx, ny def get_position(self): \"\"\"", "0: if code != \"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code", "\"\" self.__task_file = \"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port))", "= self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord, y_coord, description=None): \"\"\" Creates an", "or (response[:len(code)] == code and response[len(code)] == \",\"): break else: self.message_stack.append(response) if block:", ":return: \"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else:", "pre-built into PAGIworld. :param name: :param x: :param y: :param n: :return: \"\"\"", "self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)] == code and response[len(code)] == \",\"):", "2) - math.pow(nx, 2)) * -1 elif adjusted < 270: ny = math.cos(radjusted", "sure that we have an existing socket connection. 
If we don't, exception will", "0, z elif adjusted == 180 or adjusted == -180: return 0, (-1", "'%s' in message '%s'\" % (secondary, message)) elif command == \"addForce\" and secondary", "self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None else:", "180 degrees, and we tell him to rotate 90 degrees, if absolute is", "else: if adjusted > 0: if adjusted < 90: ny = math.cos(radjusted) *", "math.cos(radjusted * -1) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) *", "'L' \\ and not direction.upper() == 'RIGHT' and not direction.upper() == 'LEFT': raise", "/ math.pi else: angle = math.acos(z / x) * 180 / math.pi +", ":type pagi_socket: socket.socket :type __ip_address: str :type __port: int :type __timeout: float :type", "math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 else: nx = math.cos(radjusted - 270)", "str :type __port: int :type __timeout: float :type __message_fragment: str :type __task_file: str", "270: ny = math.cos(radjusted - 180) * z * -1 nx = math.sqrt(math.pow(z,", "lists where each inner list is the length of specified column_length. :param response:", "properties :param name: :param image_file: :param x: :param y: :param m: :param ph:", "\"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] #", "some number of degrees/radians. 
If absolute is True, then we rotate to position", "or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name,", "print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation):", "list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\"", "90 or adjusted == -270: return z, 0 elif adjusted == 270 or", "the first message from the socket, otherwise return the first matching message with", "\"\"\" Set a state within PAGIworld. :param name: :type name: str :param length:", "message[len(command)+1:].find(\",\") if end == -1: secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end +", "if y < 0: angle = 180 else: angle = 0 elif y", "messages from the socket. If code is blank, then we just return the", "Resets the task to the one that was loaded in self.load_task. If one", "\"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list of all", "be able to if his bottom edge is touching something solid, otherwise he'll", "!= 0 and y != 0: ax = math.fabs(x) ay = math.fabs(y) hyp", "not direction.upper() == 'L' \\ and not direction.upper() == 'RIGHT' and not direction.upper()", "a vector force to the agent to move his body. If absolute is", "agent to move his body. If absolute is False, then vectors are relative", "socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment = \"\"", "all other messages to a stack. 
If block is set to False, and", "> 0: vision.append(current) current = list() current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\"", "def get_rotation(self, degrees=True): \"\"\" Returns rotation in either degrees (0 - 359) or", "180 / math.pi else: angle = math.acos(z / x) * 180 / math.pi", "to true, no exception will be thrown, but program will stop in this", "+= self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index == -1: break else: response =", "be in a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def", "i) for i in range(0, 31): for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" %", "angle radjusted = adjusted * math.pi / 180 if adjusted == 0: return", "Attempts to return a message from the stack if (1) the stack isn't", "def __process_vision(response, column_length): \"\"\" Internal method to process returned vision repsonse. 
Splits the", "solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def", "math.pi else: angle = math.acos(z / x) * 180 / math.pi + 90", "x/y coordinates of the agent in the world :return: tuple(float, float) of coordinates", "else: if x != 0: z = math.fabs(x) else: z = math.fabs(y) nx,", "self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\" Attempts to return a message", "math.pow(ny, 2)) * -1 else: nx = math.cos(radjusted - 270) * z *", "message) end = message[len(command)+1:].find(\",\") if end == -1: secondary = message[len(command)+1:] else: secondary", "math.cos(radjusted * -1 - 180) * z * -1 nx = math.sqrt(math.pow(z, 2)", "x, y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\"", "the given :param ip: :param port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address ==", "at 270 degrees 0 90 agent 270 180 :param val: :type val: float", "not absolute or (x == 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x,", "= self.get_rotation() if x != 0 and y != 0: ax = math.fabs(x)", "Resets the agent's rotation back to 0 degrees (looking upward) :return: \"\"\" self.rotate(0,", "math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted < 270: ny =", "response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360 if degrees: rotation =", "!= 0: ax = math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax ** 2", "math.pi + 270 adjusted = rotation - angle radjusted = adjusted * math.pi", ":raises: RuntimeError \"\"\" if self.pagi_socket is None: raise RuntimeError(\"No open socket. 
Use connect()", "is True, then we rotate to position specified from 0 (looking up), otherwise", "not direction.upper() == 'LEFT': raise ValueError(\"You can only use a L or R", "y) * 180 / math.pi + 180 else: angle = math.acos(z / x)", "Resets agent state back to a starting position (looking upward with hands in", "__license__ = \"MIT\" import math import os import socket import time ERROR_CHECK =", "* -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted", "We make sure that the message is a valid action type, as well", "in the message '%s'\" % message) end = message[len(command)+1:].find(\",\") if end == -1:", "if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port", "rotation = rotation * 180 / math.pi return rotation def move_paces(self, paces, direction='L'):", "< 270: ny = math.cos(radjusted - 180) * z * -1 nx =", "% self.hand) def grab(self): \"\"\" Closes the hand, grabbing anything it is touching", "+y is always in direction of top of agent, -y is bottom, +x", "from the socket, otherwise return the first matching message with that code, saving", "not direction.upper() == 'RIGHT' and not direction.upper() == 'LEFT': raise ValueError(\"You can only", "Finish and simplify :param x: :param y: :param z: :param rotation: :return: \"\"\"", "Creates an item and drops into into PAGIworld. These items are the ones", "\"\"\" Attempts to return a message from the stack if (1) the stack", "1] if command == \"sensorRequest\" and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor", "== 1 def reset_agent(self): \"\"\" Resets agent state back to a starting position", "2)) * -1 elif adjusted < -270: ny = math.cos(radjusted * -1 -", "16 (columns) points which contains all of his periphal vision. 
vision[0][0] represents lower", "float(response[2]) def release(self): \"\"\" Opens the hand, releasing anything it could be holding", "if command == \"\" or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found", "first message from the socket, otherwise return the first matching message with that", "RuntimeError(\"Task file at '%s' was not found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\"", "= self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\" Sets a reflex", "in range(1, len(response)): if (j - 1) % column_length == 0: if len(current)", "he's looking. Therefore, if he's looking down at 180 degrees, and we tell", "that the message is a valid action type, as well verify that if", "name so we can reset things if necessary :param task_file: :type task_file: str", "2) angle = math.acos(ay / hyp) z = math.sin(angle) * ay else: if", "the task file name so we can reset things if necessary :param task_file:", "= math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax ** 2 + ay **", "a stack. If block is set to False, and there's no response from", "command == \"\" or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in", "- 270) * z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2))", "creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that", "def create_item(self, name, image_file, x, y, m, ph, r, e, k, degrees=True): \"\"\"", "* -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx = math.cos(radjusted", ":param message: :type message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command", "raise ValueError(\"You must pass in a valid PagiWorld variable to PagiAgent\") self.pagi_world =", "of lists where each inner list is the length of specified column_length. 
:param", "= self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\" Opens the hand,", "\"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint:", "math.pow(nx, 2)) * -1 elif adjusted < 270: ny = math.cos(radjusted - 180)", "2*pi) of agent (0 is looking upward) :param degrees: :type degrees: bool :return:", "response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal method", "not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index == -1:", "angle = math.acos(z / x) * 180 / math.pi + 90 else: if", "= [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS =", "hand moving it :param x: :type x: float :param y: :type y: float", "angle = math.acos(z / y) * 180 / math.pi + 180 else: angle", "(name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint:", "\"\"\" :type pagi_socket: socket.socket :type __ip_address: str :type __port: int :type __timeout: float", ":type absolute: bool :return: \"\"\" if not degrees: val = val * 180.", "the first matching message with that code, saving all other messages to a", "self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation in either degrees", "PAGIworld. These items are the ones pre-built into PAGIworld. 
:param name: :param x:", "anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def", "\"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type", "his left side. If absolute is true, then vector +y is world up,", "'%s'\" % (secondary, message)) # all messages must end with \\n if message[-1]", "index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] == \",\": return", "PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass", "specified properties :param name: :param image_file: :param x: :param y: :param m: :param", "pass in a valid PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand =", "returned vision repsonse. Splits the response into a list of lists where each", "def disconnect(self): \"\"\" Close the socket to PAGIWorld and then reset internal variables", "y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks that the given", "\"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" % i)", "PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not", "of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return", "the ones pre-built into PAGIworld. 
:param name: :param x: :param y: :param n:", "self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list of all states", "j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\",", "the agent in the world :return: tuple(float, float) of coordinates of agent \"\"\"", "a reflex completely from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\")", "agent, -y is bottom, +x is towards his right side, -x is his", ":return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list of", "% column_length == 0: if len(current) > 0: vision.append(current) current = list() current.append(response[j])", "self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name, image_file, x, y, m, ph, r,", "a state within PAGIworld. :param name: :type name: str :param length: :type length:", ":return: \"\"\" text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name, length):", "absolute is True, then we rotate to position specified from 0 (looking up),", "__ip_address: str :type __port: int :type __timeout: float :type __message_fragment: str :type __task_file:", "def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket to the given :param", "to return a message from the stack if (1) the stack isn't empty", "world bottom, +x is world right and -x is world left. 
:param x:", "if absolute is True, he'll be looking to the left at 90 degrees", "= math.acos(ay / hyp) z = math.sin(angle) * ay else: if x !=", ":param val: :type val: float :param degrees: :type degrees: bool :param absolute: :type", "Checks that the given direction is either left or right, and if it", "reset_rotation(self): \"\"\" Resets the agent's rotation back to 0 degrees (looking upward) :return:", "to the given :param ip: :param port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address", "column_length: :return: \"\"\" vision = list() current = list() for j in range(1,", "to a stack. If block is set to False, and there's no response", ":param column_length: :return: \"\"\" vision = list() current = list() for j in", "right side, -x is his left side. If absolute is true, then vector", "to zero (so that can't ever really be in a state) :param name:", "it's a valid sensor or action to prevent bad calls. :param message: :type", ":return: \"\"\" if degrees: r = r * math.pi / 180. 
self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" %", "we just use connect directly without creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close()", ":param rotation: :return: \"\"\" if x == 0: if y < 0: angle", "float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list of 11 (rows) x 16", "% (x, y)) else: rotation = self.get_rotation() if x != 0 and y", "hand, grabbing anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" %", "= self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal method to", "the hand relative to the agent :return: tupe(float, float) of the x, y", "z) elif adjusted == 90 or adjusted == -270: return z, 0 elif", ":param y: :param z: :param rotation: :return: \"\"\" if x == 0: if", "2)) * -1 elif adjusted < -180: nx = math.cos(radjusted * -1 -", "def __get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish and simplify :param x: :param", "variables (in case we just use connect directly without creating new PAGIWorld instance)", "\\ and not direction.upper() == 'RIGHT' and not direction.upper() == 'LEFT': raise ValueError(\"You", "if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in a valid PagiWorld variable", "= 180 else: angle = 0 elif y == 0: if x >", "if command == \"sensorRequest\" and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s'", "port self.__timeout = timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list()", "if x == 0: if y < 0: angle = 180 else: angle", "self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self): \"\"\" Resets agent state back to", "/ y) * 180 / math.pi + 180 else: angle = math.acos(z /", "if adjusted < 90: ny = 
math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2)", "-1 - 180) * z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny,", "\"\"\" Create a socket to the given :param ip: :param port: :return: :raises:", "Gets messages from the socket. If code is blank, then we just return", "% text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set a state within PAGIworld.", "z: :param rotation: :return: \"\"\" if x == 0: if y < 0:", "if socket doesn't return anything :param code: :type code: str :param block: :type", "\"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets messages from", "self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index == -1: break else: response", "a message to the socket. We make sure that the message is a", "agent :return: tupe(float, float) of the x, y coordinates of the hand \"\"\"", "agent some number of paces (defined as one width of his body) to", "secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command) + 1] if command", ":return: \"\"\" self.pagi_socket = None self.__ip_address = ip_address self.__port = port self.__timeout =", "agent is looking, thus +y is always in direction of top of agent,", "position of the hand relative to the agent :return: tupe(float, float) of the", ":param conditions: :param actions: :return: \"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" %", "def get_position(self): \"\"\" Gets the position of the hand relative to the agent", "if the message is for a sensor or action, that it's a valid", "+ 180 else: angle = math.acos(z / x) * 180 / math.pi +", "task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets the task to the one", "set to true, no exception will be thrown, but program will stop in", "'RIGHT' and not direction.upper() == 'LEFT': 
raise ValueError(\"You can only use a L", "z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\"", "PAGIworld. :param name: :param x: :param y: :param n: :return: \"\"\" if description", "< 0: if y > 0: angle = math.acos(z / y) * 180", "pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if", "then a RuntimeError will be raised. :raises: RuntimeError \"\"\" if self.__task_file == \"\"", "\"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\"", "\"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type", "response from the socket, after self.__timeout seconds, function will raise socket.timeout exception. If", "class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address: str :type __port: int :type", "but program will stop in this function if socket doesn't return anything :param", "False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self): \"\"\"", "was loaded in self.load_task. 
If one wasn't loaded, then a RuntimeError will be", "len(command) + 1] if command == \"sensorRequest\" and secondary not in VALID_SENSORS: raise", "self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal method to process", "states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\" Sets a", "(looking upward with hands in starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\"", "open a new socket connection\") def send_message(self, message): \"\"\" Send a message to", "% (name, image_file, x, y, m, ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object):", "Closes the hand, grabbing anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand)", "self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)] == code and", "in a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self):", "blank, then we just return the first message from the socket, otherwise return", "reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def", "direction.upper() == 'RIGHT' and not direction.upper() == 'LEFT': raise ValueError(\"You can only use", ":type __port: int :type __timeout: float :type __message_fragment: str :type __task_file: str :type", "size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def", "is world up, -y is world bottom, +x is world right and -x", "new PAGIWorld instance) 
:return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that we", "body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def", ":param ph: :param r: :param e: :param k: :param degrees: :return: \"\"\" if", ":raises: ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address", "raise RuntimeError(\"Invalid command found in the message '%s'\" % message) end = message[len(command)+1:].find(\",\")", "message '%s'\" % (secondary, message)) elif command == \"addForce\" and secondary not in", "if len(self.message_stack) > 0: if code != \"\": for index in range(len(self.message_stack)): if", "vision def center_hands(self): \"\"\" Moves both of the agent's hands to the center", "def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world def get_position(self):", "\"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self): \"\"\" Resets", "message): \"\"\" Send a message to the socket. We make sure that the", "to move his body. 
If absolute is False, then vectors are relative to", "just return the first message from the socket, otherwise return the first matching", "if message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\"", "self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\"", "\"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\",", "sensor '%s' in message '%s'\" % (secondary, message)) elif command == \"addForce\" and", "\"MIT\" import math import os import socket import time ERROR_CHECK = True VALID_COMMANDS", "upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the", "must pass in a valid PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand", "y < 0: angle = 180 else: angle = 0 elif y ==", "2) - math.pow(ny, 2)) * -1 elif adjusted < -180: nx = math.cos(radjusted", "range(0, 16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES =", "name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list of all the active reflexes", "\"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\"", "something solid, otherwise he'll do nothing. :return: bool True if agent has jumped", "0: angle = 180 else: angle = 0 elif y == 0: if", "drops into into PAGIworld. These items are the ones pre-built into PAGIworld. 
:param", ":return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if command ==", ":type absolute: bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y))", "nx = math.cos(radjusted * -1 - 90) * z * -1 ny =", "\"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\",", "direction of top of agent, -y is bottom, +x is towards his right", "blank or it matches something on the message stack :param code: :return: str", "starting position (looking upward with hands in starting position) :return: \"\"\" self.reset_rotation() def", "name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list", "\"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def", ":param y: :type y: float :param absolute: :type absolute: bool :return: \"\"\" x", "at '%s' was not found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file)", ":param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a", "to where he's looking. 
Therefore, if he's looking down at 180 degrees, and", "current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\" Moves both of the agent's hands", "if message_index == -1: break else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if", "and if it isn't, raise exception :param direction: :return: \"\"\" if not direction.upper()", "angle = 0 elif y == 0: if x > 0: angle =", "== \",\": return self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\"", "None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" %", "math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted < -270: ny =", "self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]),", "if code == \"\" or (response[:len(code)] == code and response[len(code)] == \",\"): break", "-1 cnt = 0 while cnt < paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2)", "direction: str :return: \"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper() == \"R\" else", "\"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\",", "reset internal variables (in case we just use connect directly without creating new", "2)) else: if adjusted < -90: ny = math.cos(radjusted * -1) * z", "absolute: bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else:", "range(0, 31): for j in range(0, 21): 
VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i", "thus +y is always in direction of top of agent, -y is bottom,", "elif adjusted < -270: ny = math.cos(radjusted * -1 - 180) * z", "a message from the stack if (1) the stack isn't empty and (2)", "pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\" Gets the", "number of paces (defined as one width of his body) to either the", "not degrees: val = val * 180. / math.pi if absolute: val %=", "self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt += 1 def send_force(self, x=0, y=0, absolute=False):", "relative to the agent :return: tupe(float, float) of the x, y coordinates of", "return response def __get_message_from_stack(self, code): \"\"\" Attempts to return a message from the", "90 else: if y < 0: angle = math.acos(z / y) * 180", "return vision def center_hands(self): \"\"\" Moves both of the agent's hands to the", "k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type", "tell him to rotate 90 degrees, if absolute is True, he'll be looking", "tupe(float, float) of the x, y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" %", "agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns", "i) VALID_SENSORS.append(\"R%d\" % i) for i in range(0, 31): for j in range(0,", "return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a list of ?x? 
points which", "current = list() current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\" Moves both of", "actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a", "lower left of the vision field with vision[10][15] representing upper right :return: list", "absolute: val %= 360. val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self,", "\"\"\" Resets the task to the one that was loaded in self.load_task. If", "elif adjusted == 180 or adjusted == -180: return 0, (-1 * z)", "holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes", "self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list of all the active reflexes in", "bool :return: \"\"\" if not degrees: val = val * 180. / math.pi", "- angle radjusted = adjusted * math.pi / 180 if adjusted == 0:", "periphal vision. 
vision[0][0] represents lower left of the vision field with vision[10][15] representing", "cnt += 1 def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a vector force", "'R' and not direction.upper() == 'L' \\ and not direction.upper() == 'RIGHT' and", "this function if socket doesn't return anything :param code: :type code: str :param", "(i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\",", "str :type __task_file: str :type message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3):", "val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation", "create_item(self, name, image_file, x, y, m, ph, r, e, k, degrees=True): \"\"\" Creates", "rotate to position specified from 0 (looking up), otherwise rotate him relative to", "the hand, releasing anything it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand)", "is world left. :param x: :type x: float :param y: :type y: float", "to be carried out on conditions. 
:param name: :param conditions: :param actions: :return:", "in range(0, 16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES", "absolute: :type absolute: bool :return: \"\"\" x = float(x) y = float(y) if", "API \"\"\" __author__ = \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\" __credits__ =", "\"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes the hand,", "absolute: bool :return: \"\"\" x = float(x) y = float(y) if not absolute", "= \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ =", "in PAGIworld with the specified properties :param name: :param image_file: :param x: :param", "* z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 elif adjusted", "message)) elif command == \"addForce\" and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force", "\"\"\" Moves both of the agent's hands to the center of his body", "remove_reflex(self, name): \"\"\" Removes a reflex completely from PAGIworld :param name: :return: \"\"\"", "self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\" Opens the hand, releasing", "% val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation in either degrees (0", "response != \"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index =", "% (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name):", "right and -x is world left. :param x: :type x: float :param y:", "degrees 0 90 agent 270 180 :param val: :type val: float :param degrees:", "\"\"\" Print text to the PAGIworld console window. 
:param text: :type text: str", "is False, he'll be looking to the right at 270 degrees 0 90", "of the x, y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response", "* z) elif adjusted == 90 or adjusted == -270: return z, 0", "angle = math.acos(z / x) * 180 / math.pi + 270 adjusted =", "grab(self): \"\"\" Closes the hand, grabbing anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\"", "after self.__timeout seconds, function will raise socket.timeout exception. If block is set to", "-1 - 90) * z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx,", "in the world :return: tuple(float, float) of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response", "Loads a task in PAGIworld. We additionally save the task file name so", "block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and response != \"\": while \"\\n\"", "or radians (0 - 2*pi) of agent (0 is looking upward) :param degrees:", "self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self): \"\"\" Resets agent", "program will stop in this function if socket doesn't return anything :param code:", "\"\"\" __author__ = \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"]", "== \",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self,", "def get_periphal_vision(self): \"\"\" Returns a list of 11 (rows) x 16 (columns) points", ":raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was not", "math.acos(z / x) * 180 / math.pi + 90 else: if y <", "self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket to PAGIWorld and then 
reset internal", "= math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if adjusted < -90: ny =", "absolute is True, he'll be looking to the left at 90 degrees and", "range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for i in range(0, 31): for", "RuntimeError(\"No open socket. Use connect() to open a new socket connection\") def send_message(self,", "ax = math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax ** 2 + ay", "y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" %", "direction.upper() == 'L' \\ and not direction.upper() == 'RIGHT' and not direction.upper() ==", "adjusted > 0: if adjusted < 90: ny = math.cos(radjusted) * z nx", "RuntimeError(\"Cannot reset task, no previous task file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\"", "a list of ?x? points which contains all of his detailed vision :return:", "move his body. If absolute is False, then vectors are relative to the", "an item and drops into into PAGIworld. These items are the ones pre-built", "0: angle = 270 else: angle = 90 elif x < 0: if", "__author__ = \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__", "absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction):", ":param y: :param n: :return: \"\"\" if description is None or description ==", "reflex completely from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def", "If absolute is True, then we rotate to position specified from 0 (looking", "into PAGIworld. 
:param name: :param x: :param y: :param n: :return: \"\"\" if", "response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self): \"\"\" Resets agent state", "if self.__task_file == \"\" or self.__task_file is None: raise RuntimeError(\"Cannot reset task, no", "hand, releasing anything it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\"", "self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the agent some number", "\"\" self.__task_file = \"\" self.message_stack = list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self)", "def reset_task(self): \"\"\" Resets the task to the one that was loaded in", "2) - math.pow(ny, 2)) else: nx = math.cos(radjusted * -1 - 270) *", "== \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord,", "internal variables (in case we just use connect directly without creating new PAGIWorld", "0, (-1 * z) elif adjusted == 90 or adjusted == -270: return", "paces: int :param direction: :type direction: str :return: \"\"\" assert_left_or_right(direction) val = 1", "list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord, y_coord,", "float(x) y = float(y) if not absolute or (x == 0 and y", "list of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response,", "(nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish and", "self.pagi_world = pagi_world def get_position(self): \"\"\" Gets the position of the hand relative", "z ny = math.sqrt(math.pow(z, 2) - 
math.pow(nx, 2)) * -1 elif adjusted <", "== -180: return 0, (-1 * z) elif adjusted == 90 or adjusted", "currently in PAGIworld. :return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def", "= port self.__timeout = timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack =", "= hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\" Gets the position of the", "that code, saving all other messages to a stack. If block is set", "PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name,", "center_hands(self): \"\"\" Moves both of the agent's hands to the center of his", "PAGIworld. :param name: :type name: str :param length: :type length: int :return: \"\"\"", "code): \"\"\" Attempts to return a message from the stack if (1) the", "task to the one that was loaded in self.load_task. 
If one wasn't loaded,", "file name so we can reset things if necessary :param task_file: :type task_file:", "True, then we rotate to position specified from 0 (looking up), otherwise rotate", "(0 is looking upward) :param degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response", "\"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\"", "vision.append(current) current = list() current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\" Moves both", "self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation in either degrees (0 - 359)", "are relative to the direction agent is looking, thus +y is always in", "be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\"", ":param image_file: :param x: :param y: :param m: :param ph: :param r: :param", "str :type message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip:", "= list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\"", "True, he'll be looking to the left at 90 degrees and if absolute", ":param code: :return: str \"\"\" if len(self.message_stack) > 0: if code != \"\":", "not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def", "looking, thus +y is always in direction of top of agent, -y is", "Therefore, if he's looking down at 180 degrees, and we tell him to", "Send a message to the socket. 
We make sure that the message is", "- 2*pi) of agent (0 is looking upward) :param degrees: :type degrees: bool", "\"\"\" Internal method to process returned vision repsonse. Splits the response into a", "\"addForce\" and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message '%s'\"", "'%s'\" % (secondary, message)) elif command == \"addForce\" and secondary not in VALID_FORCES:", ":param absolute: :type absolute: bool :return: \"\"\" x = float(x) y = float(y)", "the stack if (1) the stack isn't empty and (2) either code is", "\"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" %", "direction is either left or right, and if it isn't, raise exception :param", "represents lower left of the vision field with vision[10][15] representing upper right :return:", "he'll be looking to the right at 270 degrees 0 90 agent 270", "is True, he'll be looking to the left at 90 degrees and if", "0 (looking up), otherwise rotate him relative to where he's looking. 
Therefore, if", "* z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted < 180:", "representing upper right :return: list of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response", "0: vision.append(current) current = list() current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\" Moves", "timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket", "= True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\",", "was not found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self):", "!= 0: z = math.fabs(x) else: z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x,", "a list of 11 (rows) x 16 (columns) points which contains all of", "print_text(self, text): \"\"\" Print text to the PAGIworld console window. :param text: :type", "0: if y < 0: angle = 180 else: angle = 0 elif", "if his bottom edge is touching something solid, otherwise he'll do nothing. 
:return:", "self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord, y_coord, description=None): \"\"\"", "(0 - 2*pi) of agent (0 is looking upward) :param degrees: :type degrees:", "y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation = self.get_rotation() if x", ":param ip: :param port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address", "the stack isn't empty and (2) either code is blank or it matches", "the agent's rotation back to 0 degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True)", "conditions, actions=None): \"\"\" Sets a reflex in PAGIworld to be carried out on", "for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in range(0,", "action, that it's a valid sensor or action to prevent bad calls. :param", "direction.upper() == 'LEFT': raise ValueError(\"You can only use a L or R value", "\"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\"", "y)) else: rotation = self.get_rotation() if x != 0 and y != 0:", "self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\" Attempts to return a message from", "y: :type y: float :param absolute: :type absolute: bool :return: \"\"\" if not", "% i) VALID_SENSORS.append(\"R%d\" % i) for i in range(0, 31): for j in", "to if his bottom edge is touching something solid, otherwise he'll do nothing.", "block: :type block: bool :return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response =", "* ay else: if x != 0: z = math.fabs(x) else: z =", "force to the hand moving it :param x: :type x: float :param y:", "reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord, y_coord, 
description=None): \"\"\" Creates", "if degrees: rotation = rotation * 180 / math.pi return rotation def move_paces(self,", "180 / math.pi return rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts to move", "\"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"]", "= PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod", "0: if len(current) > 0: vision.append(current) current = list() current.append(response[j]) vision.append(current) return vision", ":return: :raises: RuntimeError \"\"\" if self.pagi_socket is None: raise RuntimeError(\"No open socket. Use", "pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes", "block=False): \"\"\" Gets messages from the socket. If code is blank, then we", "else: if y < 0: angle = math.acos(z / y) * 180 /", "secondary = message[len(command)+1:end + len(command) + 1] if command == \"sensorRequest\" and secondary", "absolute: bool :return: \"\"\" if not degrees: val = val * 180. /", "timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.connect(ip_address, port, timeout)", "math.fabs(x) else: z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx,", "return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list of 11 (rows) x", "x != 0 and y != 0: ax = math.fabs(x) ay = math.fabs(y)", "ip: :param port: :return: \"\"\" self.pagi_socket = None self.__ip_address = ip_address self.__port =", "y: :param z: :param rotation: :return: \"\"\" if x == 0: if y", "% (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a reflex completely from", "of his body) to either the left or right. 
:param paces: :type paces:", "RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if command == \"\" or", "= math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted < 180: nx = math.cos(radjusted", "absolute=False): \"\"\" Sends a vector of force to the hand moving it :param", "is a valid action type, as well verify that if the message is", "where each inner list is the length of specified column_length. :param response: :param", "to the direction agent is looking, thus +y is always in direction of", "elif adjusted < 180: nx = math.cos(radjusted - 90) * z ny =", "the PAGIworld console window. :param text: :type text: str :return: \"\"\" text =", "connect() to open a new socket connection\") def send_message(self, message): \"\"\" Send a", "== 'LEFT': raise ValueError(\"You can only use a L or R value for", "\"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"]", "\"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for", "+x is world right and -x is world left. :param x: :type x:", "stack isn't empty and (2) either code is blank or it matches something", "right at 270 degrees 0 90 agent 270 180 :param val: :type val:", "in message '%s'\" % (secondary, message)) # all messages must end with \\n", "= math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx, ny def get_position(self): \"\"\" Gets", ":raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if command == \"\"", "rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z,", "code, saving all other messages to a stack. 
If block is set to", "else: angle = math.acos(z / x) * 180 / math.pi + 90 else:", "/ math.pi + 90 else: if y < 0: angle = math.acos(z /", "simplify :param x: :param y: :param z: :param rotation: :return: \"\"\" if x", "then reset internal variables (in case we just use connect directly without creating", "% i) for i in range(0, 31): for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\"", "\"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets messages from the socket. If", "direction='L'): \"\"\" Attempts to move the agent some number of paces (defined as", "= 0 while cnt < paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt +=", "seconds, function will raise socket.timeout exception. If block is set to true, no", "code is blank or it matches something on the message stack :param code:", "if code != \"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and", "ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type", "False, he'll be looking to the right at 270 degrees 0 90 agent", "is set to true, no exception will be thrown, but program will stop", "image_file, x, y, m, ph, r, e, k, degrees=True): \"\"\" Creates a new", "some number of paces (defined as one width of his body) to either", "adjusted == 270 or adjusted == -90: return (-1 * z), 0 else:", "def assert_left_or_right(direction): \"\"\" Checks that the given direction is either left or right,", "if adjusted == 0: return 0, z elif adjusted == 180 or adjusted", "len(response)): if (j - 1) % column_length == 0: if len(current) > 0:", "of his body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld", "for i in range(0, 16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i,", "-1 elif adjusted < 270: ny = math.cos(radjusted - 180) * z *", "to a starting 
position (looking upward with hands in starting position) :return: \"\"\"", "-x is world left. :param x: :type x: float :param y: :type y:", "(self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks that", "that it's a valid sensor or action to prevent bad calls. :param message:", "90 elif x < 0: if y > 0: angle = math.acos(z /", "load_task(self, task_file): \"\"\" Loads a task in PAGIworld. We additionally save the task", "@staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish and simplify :param x:", "math.pi return rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts to move the agent", "__get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish and simplify :param x: :param y:", "that the given direction is either left or right, and if it isn't,", "looking to the right at 270 degrees 0 90 agent 270 180 :param", "degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1])", "in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0)", "j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in range(0, 16):", "- 270) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx,", "is touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1])", "absolute is true, then vector +y is world up, -y is world bottom,", "math.acos(z / y) * 180 / math.pi + 180 else: angle = math.acos(z", "math.cos(radjusted * -1 - 90) * z * -1 ny = math.sqrt(math.pow(z, 2)", "or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the message '%s'\"", "from the 
socket. If code is blank, then we just return the first", "in a valid PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l',", "== -90: return (-1 * z), 0 else: if adjusted > 0: if", "= timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.connect(ip_address, port,", "port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address", "list of all states that are currently in PAGIworld. :return: list \"\"\" self.send_message(\"getActiveStates\")", "time.sleep(2) cnt += 1 def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a vector", "180 else: angle = math.acos(z / x) * 180 / math.pi + 270", "response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a list of", ":type message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param", "x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks that the", "contains all of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return", "get_position(self): \"\"\" Gets x/y coordinates of the agent in the world :return: tuple(float,", "one that was loaded in self.load_task. 
If one wasn't loaded, then a RuntimeError", "nx = math.cos(radjusted - 270) * z * -1 ny = math.sqrt(math.pow(z, 2)", "the agent some number of paces (defined as one width of his body)", "code is blank, then we just return the first message from the socket,", "from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\"", "right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must", "def get_all_reflexes(self): \"\"\" Returns a list of all the active reflexes in PAGIworld", "2)) * -1 elif adjusted < 270: ny = math.cos(radjusted - 180) *", "% self.hand) def send_force(self, x, y, absolute=False): \"\"\" Sends a vector of force", "is towards his right side, -x is his left side. If absolute is", "will be thrown, but program will stop in this function if socket doesn't", "0 else: if adjusted > 0: if adjusted < 90: ny = math.cos(radjusted)", "then we just return the first message from the socket, otherwise return the", "if direction[0].upper() == \"R\" else -1 cnt = 0 while cnt < paces:", "at 180 degrees, and we tell him to rotate 90 degrees, if absolute", "secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message '%s'\" % (secondary,", "position (looking upward with hands in starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self):", "\"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\",", "Removes a reflex completely from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name)", "(-1 * z) elif adjusted == 90 or adjusted == -270: return z,", "11 (rows) x 16 (columns) points which contains all of his periphal vision.", "0 and y != 0: ax = math.fabs(x) ay = math.fabs(y) hyp =", "- math.pow(nx, 2)) else: if 
adjusted < -90: ny = math.cos(radjusted * -1)", "Gets the position of the hand relative to the agent :return: tupe(float, float)", "self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket to PAGIWorld and", "rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts to move the agent some number", "to PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world)", "be raised. :raises: RuntimeError \"\"\" if self.__task_file == \"\" or self.__task_file is None:", ":return: \"\"\" vision = list() current = list() for j in range(1, len(response)):", "= rotation - angle radjusted = adjusted * math.pi / 180 if adjusted", "self.__port = port self.__timeout = timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack", "== \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port self.__timeout =", "be carried out on conditions. :param name: :param conditions: :param actions: :return: \"\"\"", "not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\")", "+x is towards his right side, -x is his left side. If absolute", "180 / math.pi + 90 else: if y < 0: angle = math.acos(z", "agent some number of degrees/radians. If absolute is True, then we rotate to", "\"\"\" Gets messages from the socket. 
If code is blank, then we just", "return the first message from the socket, otherwise return the first matching message", "= math.sin(angle) * ay else: if x != 0: z = math.fabs(x) else:", ":return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions,", "None else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a task in PAGIworld.", "self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal", "the center of his body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type", "(columns) points which contains all of his periphal vision. vision[0][0] represents lower left", "is blank or it matches something on the message stack :param code: :return:", "* -1 - 90) * z * -1 ny = math.sqrt(math.pow(z, 2) -", "self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish and simplify :param", "code != \"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\", "in PAGIworld to be carried out on conditions. :param name: :param conditions: :param", "\"\"\" if not degrees: val = val * 180. / math.pi if absolute:", "the socket. 
We make sure that the message is a valid action type,", "the message '%s'\" % message) end = message[len(command)+1:].find(\",\") if end == -1: secondary", "else: nx = math.cos(radjusted - 270) * z * -1 ny = math.sqrt(math.pow(z,", "self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket to PAGIWorld and then reset", "name: :param image_file: :param x: :param y: :param m: :param ph: :param r:", "x: :type x: float :param y: :type y: float :param absolute: :type absolute:", "* z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1", "console window. :param text: :type text: str :return: \"\"\" text = str(text) self.send_message(\"print,%s\"", "< paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt += 1 def send_force(self, x=0,", "z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx, ny def get_position(self):", "self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket to", "nothing. 
:return: bool True if agent has jumped (his bottom is touching something", "90) * z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) *", "assert_left_or_right(direction) val = 1 if direction[0].upper() == \"R\" else -1 cnt = 0", "k: :param degrees: :return: \"\"\" if degrees: r = r * math.pi /", "x) * 180 / math.pi + 90 else: if y < 0: angle", "% self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\" Opens the hand, releasing anything", "of the agent in the world :return: tuple(float, float) of coordinates of agent", "270 degrees 0 90 agent 270 180 :param val: :type val: float :param", "(looking upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate", "self.hand) def send_force(self, x, y, absolute=False): \"\"\" Sends a vector of force to", "class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand", ":return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length):", "list of all the active reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes", "180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m, ph, r, e, k)) self.get_message(code=\"createItem\")", "= \"MIT\" import math import os import socket import time ERROR_CHECK = True", "a reflex in PAGIworld to be carried out on conditions. :param name: :param", "y=0, absolute=False): \"\"\" Sends a vector force to the agent to move his", "world right and -x is world left. 
:param x: :type x: float :param", "self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self):", "= self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)] == code", "else: nx = math.cos(radjusted * -1 - 270) * z ny = math.sqrt(math.pow(z,", "str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if command", "== 270 or adjusted == -90: return (-1 * z), 0 else: if", "don't, exception will be raised. :return: :raises: RuntimeError \"\"\" if self.pagi_socket is None:", "secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message '%s'\" % (secondary,", "self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\" TODO:", "radians (0 - 2*pi) of agent (0 is looking upward) :param degrees: :type", "no previous task file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text to", "previous task file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text to the", "rotation = float(response[-1]) rotation %= 360 if degrees: rotation = rotation * 180", "ph, r, e, k, degrees=True): \"\"\" Creates a new item in PAGIworld with", "math.cos(radjusted * -1 - 270) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx,", "self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes the hand, grabbing anything it is", "will raise socket.timeout exception. 
If block is set to true, no exception will", "to 0 degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True,", "able to if his bottom edge is touching something solid, otherwise he'll do", "1 if direction[0].upper() == \"R\" else -1 cnt = 0 while cnt <", ":return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\"", "degrees=True): \"\"\" Creates a new item in PAGIworld with the specified properties :param", "PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand =", "self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a list of ?x? points which contains", "\"\" or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the message", "\"\"\" Returns rotation in either degrees (0 - 359) or radians (0 -", "RuntimeError(\"Invalid force '%s' in message '%s'\" % (secondary, message)) # all messages must", ":param m: :param ph: :param r: :param e: :param k: :param degrees: :return:", "in this function if socket doesn't return anything :param code: :type code: str", "0: angle = math.acos(z / y) * 180 / math.pi else: angle =", "not found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\"", "right. :param paces: :type paces: int :param direction: :type direction: str :return: \"\"\"", "is either left or right, and if it isn't, raise exception :param direction:", "16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\",", ":param degrees: :return: \"\"\" if degrees: r = r * math.pi / 180.", "-1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted <", "def set_state(self, name, length): \"\"\" Set a state within PAGIworld. 
:param name: :type", "def __get_message_from_stack(self, code): \"\"\" Attempts to return a message from the stack if", "self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list", "from the stack if (1) the stack isn't empty and (2) either code", "adjusted == -90: return (-1 * z), 0 else: if adjusted > 0:", "exception will be raised. :return: :raises: RuntimeError \"\"\" if self.pagi_socket is None: raise", "message stack :param code: :return: str \"\"\" if len(self.message_stack) > 0: if code", "val, degrees=True, absolute=False): \"\"\" Rotate the agent some number of degrees/radians. If absolute", "-1 elif adjusted < -180: nx = math.cos(radjusted * -1 - 90) *", "so we can reset things if necessary :param task_file: :type task_file: str :raises:", "PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address: str :type __port: int :type __timeout:", "float :param absolute: :type absolute: bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" %", "x < 0: if y > 0: angle = math.acos(z / y) *", "adjusted == -270: return z, 0 elif adjusted == 270 or adjusted ==", "socket. We make sure that the message is a valid action type, as", "__task_file: str :type message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param", ":type y: float :param absolute: :type absolute: bool :return: \"\"\" if not absolute:", "looking upward) :param degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\")", "send_message(self, message): \"\"\" Send a message to the socket. We make sure that", "sensor or action to prevent bad calls. :param message: :type message: str :return:", "conditions. 
:param name: :param conditions: :param actions: :return: \"\"\" if actions is not", "self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a reflex completely from PAGIworld :param name:", "m, ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld", "in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self,", "90 agent 270 180 :param val: :type val: float :param degrees: :type degrees:", "message with that code, saving all other messages to a stack. If block", "self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\" Opens", "e, k, degrees=True): \"\"\" Creates a new item in PAGIworld with the specified", "vision = list() current = list() for j in range(1, len(response)): if (j", "y, z, rotation): \"\"\" TODO: Finish and simplify :param x: :param y: :param", "bottom, +x is world right and -x is world left. :param x: :type", "math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted <", "2) - math.pow(ny, 2)) elif adjusted < 180: nx = math.cos(radjusted - 90)", ":param task_file: :type task_file: str :raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task", "if adjusted < -90: ny = math.cos(radjusted * -1) * z nx =", "his body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\"", "absolute: :type absolute: bool :return: \"\"\" if not degrees: val = val *", "be thrown, but program will stop in this function if socket doesn't return", "side, -x is his left side. 
If absolute is true, then vector +y", "self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets the task to the one that", "of top of agent, -y is bottom, +x is towards his right side,", "(so that can't ever really be in a state) :param name: :return: \"\"\"", "his body. If absolute is False, then vectors are relative to the direction", "self.__timeout seconds, function will raise socket.timeout exception. If block is set to true,", "and y != 0: ax = math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax", "message '%s'\" % (secondary, message)) # all messages must end with \\n if", "vector +y is world up, -y is world bottom, +x is world right", "= message[len(command)+1:].find(\",\") if end == -1: secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end", "given direction is either left or right, and if it isn't, raise exception", "task in PAGIworld. We additionally save the task file name so we can", "return z, 0 elif adjusted == 270 or adjusted == -90: return (-1", "math.pow(ny, 2)) else: nx = math.cos(radjusted * -1 - 270) * z ny", "\"\"\" Python PAGIworld API \"\"\" __author__ = \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR", "\"\"\" Returns a list of all the active reflexes in PAGIworld :return: list", "float :param y: :type y: float :param absolute: :type absolute: bool :return: \"\"\"", "message[len(command)+1:end + len(command) + 1] if command == \"sensorRequest\" and secondary not in", "# all messages must end with \\n if message[-1] != \"\\n\": message +=", "self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the agent to try and", "raise RuntimeError(\"No open socket. Use connect() to open a new socket connection\") def", "socket connection. If we don't, exception will be raised. 
:return: :raises: RuntimeError \"\"\"", "adjusted == 0: return 0, z elif adjusted == 180 or adjusted ==", "+= \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets messages from the socket.", "can't ever really be in a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" %", "points which contains all of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response =", "(name, image_file, x, y, m, ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\"", "0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation = self.get_rotation()", "y: :type y: float :param absolute: :type absolute: bool :return: \"\"\" x =", "message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command) + 1] if command == \"sensorRequest\"", ":type __timeout: float :type __message_fragment: str :type __task_file: str :type message_stack: list \"\"\"", "= self.__message_fragment.find(\"\\n\") if message_index == -1: break else: response = self.__message_fragment[:message_index] self.__message_fragment =", "\"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360 if degrees:", "\"\"\" Make sure that we have an existing socket connection. 
If we don't,", "name: :param conditions: :param actions: :return: \"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\"", "response def __get_message_from_stack(self, code): \"\"\" Attempts to return a message from the stack", "agent's hands to the center of his body :return: \"\"\" raise NotImplementedError class", "ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port", "top of agent, -y is bottom, +x is towards his right side, -x", "found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text to the PAGIworld console window.", "in either degrees (0 - 359) or radians (0 - 2*pi) of agent", "180 else: angle = 0 elif y == 0: if x > 0:", "* math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m, ph, r,", "self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self):", "rotate 90 degrees, if absolute is True, he'll be looking to the left", "int :type __timeout: float :type __message_fragment: str :type __task_file: str :type message_stack: list", "ip: :param port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address =", "RuntimeError(\"Invalid command found in the message '%s'\" % message) end = message[len(command)+1:].find(\",\") if", "== 'R' and not direction.upper() == 'L' \\ and not direction.upper() == 'RIGHT'", ":param y: :param m: :param ph: :param r: :param e: :param k: :param", "x: :param y: :param n: :return: \"\"\" if description is None or description", "PAGIworld with the specified properties :param name: :param image_file: :param x: :param y:", "self.pagi_socket is None: raise RuntimeError(\"No open socket. 
Use connect() to open a new", "\"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i in", "Set a state within PAGIworld. :param name: :type name: str :param length: :type", "vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def __process_vision(response,", "\"\"\" if len(self.message_stack) > 0: if code != \"\": for index in range(len(self.message_stack)):", ":return: \"\"\" if x == 0: if y < 0: angle = 180", "response = self.__get_message_from_stack(code) while True and response != \"\": while \"\\n\" not in", "pylint: disable=too-many-arguments def create_item(self, name, image_file, x, y, m, ph, r, e, k,", "description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord,", "= self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a list of ?x?", "to the right at 270 degrees 0 90 agent 270 180 :param val:", "message is a valid action type, as well verify that if the message", "Splits the response into a list of lists where each inner list is", "+ len(command) + 1] if command == \"sensorRequest\" and secondary not in VALID_SENSORS:", "degrees: bool :param absolute: :type absolute: bool :return: \"\"\" if not degrees: val", "it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self,", "< 90: ny = math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny,", "self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal method to process returned vision", "necessary :param task_file: :type task_file: str :raises: FileNotFoundError \"\"\" if 
not os.path.isfile(task_file): raise", "\"\"\" Sets a reflex in PAGIworld to be carried out on conditions. :param", "z = math.fabs(x) else: z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z,", "be raised. :return: :raises: RuntimeError \"\"\" if self.pagi_socket is None: raise RuntimeError(\"No open", "str :param block: :type block: bool :return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True)", "% (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name, image_file,", "** 2 + ay ** 2) angle = math.acos(ay / hyp) z =", ":return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes the", "< 0: angle = math.acos(z / y) * 180 / math.pi + 180", "z = math.sin(angle) * ay else: if x != 0: z = math.fabs(x)", ":type __task_file: str :type message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\"", "180 / math.pi + 270 adjusted = rotation - angle radjusted = adjusted", "and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message '%s'\" %", "== \"sensorRequest\" and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message", "timeout=3): \"\"\" :param ip: :param port: :return: \"\"\" self.pagi_socket = None self.__ip_address =", "block is set to False, and there's no response from the socket, after", "None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def", "a sensor or action, that it's a valid sensor or action to prevent", "message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if", "into into PAGIworld. These items are the ones pre-built into PAGIworld. 
:param name:", "hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2])", "self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y, absolute=False): \"\"\" Sends a vector", "with the specified properties :param name: :param image_file: :param x: :param y: :param", "?x? points which contains all of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response", "task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets the task", "return a message from the stack if (1) the stack isn't empty and", "PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\"", "text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set", "ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if adjusted < -90: ny", "def get_position(self): \"\"\" Gets x/y coordinates of the agent in the world :return:", "return self.__process_vision(response, 21) @staticmethod def __process_vision(response, column_length): \"\"\" Internal method to process returned", "if y > 0: angle = math.acos(z / y) * 180 / math.pi", "bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass", "force '%s' in message '%s'\" % (secondary, message)) # all messages must end", "use connect directly without creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self):", "+ 90 else: if y < 0: angle = math.acos(z / y) *", "nx = math.cos(radjusted * -1 - 270) * z ny = math.sqrt(math.pow(z, 2)", "0: if y > 0: angle = math.acos(z / y) * 180 /", "else -1 cnt = 0 while cnt < paces: 
self.send_force(x=(val * 1000), absolute=True)", "specified from 0 (looking up), otherwise rotate him relative to where he's looking.", "= 270 else: angle = 90 elif x < 0: if y >", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket to", "while \"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index", "his bottom edge is touching something solid, otherwise he'll do nothing. :return: bool", "if y < 0: angle = math.acos(z / y) * 180 / math.pi", "math.pow(nx, 2)) return nx, ny def get_position(self): \"\"\" Gets x/y coordinates of the", "conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a reflex completely from PAGIworld :param", "end with \\n if message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self,", "= math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" %", "\"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description))", "must end with \\n if message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def", "\"\"\" Resets agent state back to a starting position (looking upward with hands", "the agent :return: tupe(float, float) of the x, y coordinates of the hand", "all states that are currently in PAGIworld. :return: list \"\"\" self.send_message(\"getActiveStates\") states =", "an existing socket connection. If we don't, exception will be raised. 
:return: :raises:", ":param block: :type block: bool :return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response", "adjusted < -270: ny = math.cos(radjusted * -1 - 180) * z *", "VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i in range(5):", "stack if (1) the stack isn't empty and (2) either code is blank", "(name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a reflex completely from PAGIworld", "block is set to true, no exception will be thrown, but program will", "left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld):", "hands to the center of his body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object):", "VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message '%s'\" % (secondary, message)) # all", "= [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\"", "of the agent's hands to the center of his body :return: \"\"\" raise", "else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a task in PAGIworld. We", "Internal method to process returned vision repsonse. Splits the response into a list", "- math.pow(ny, 2)) elif adjusted < 180: nx = math.cos(radjusted - 90) *", ":raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and response", "x 16 (columns) points which contains all of his periphal vision. 
vision[0][0] represents", "0 degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False):", "< -270: ny = math.cos(radjusted * -1 - 180) * z * -1", "self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that we have an existing socket connection.", "self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\" Opens the hand, releasing anything it", "[\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\",", "def remove_reflex(self, name): \"\"\" Removes a reflex completely from PAGIworld :param name: :return:", ":type message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")]", "!= \"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)]", "y: :param m: :param ph: :param r: :param e: :param k: :param degrees:", "self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360 if degrees: rotation = rotation *", "absolute or (x == 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y))", "@staticmethod def __process_vision(response, column_length): \"\"\" Internal method to process returned vision repsonse. Splits", "exception. If block is set to true, no exception will be thrown, but", ":param name: :type name: str :param length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\"", "def drop_item(self, name, x_coord, y_coord, description=None): \"\"\" Creates an item and drops into", "process returned vision repsonse. 
Splits the response into a list of lists where", ":return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and", "found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets", "elif y == 0: if x > 0: angle = 270 else: angle", "if x != 0: z = math.fabs(x) else: z = math.fabs(y) nx, ny", "wasn't loaded, then a RuntimeError will be raised. :raises: RuntimeError \"\"\" if self.__task_file", "False, and there's no response from the socket, after self.__timeout seconds, function will", "ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket to the given :param ip: :param", "\"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] ==", "i in range(0, 31): for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j))", "y) * 180 / math.pi else: angle = math.acos(z / x) * 180", "PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand =", "16) def get_detailed_vision(self): \"\"\" Returns a list of ?x? points which contains all", "the length of specified column_length. :param response: :param column_length: :return: \"\"\" vision =", "connection\") def send_message(self, message): \"\"\" Send a message to the socket. 
We make", "= ip_address self.__port = port self.__timeout = timeout self.__message_fragment = \"\" self.__task_file =", "x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\"", "k, degrees=True): \"\"\" Creates a new item in PAGIworld with the specified properties", "or adjusted == -180: return 0, (-1 * z) elif adjusted == 90", "assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\" Gets the position", "j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\",", "adjusted < -180: nx = math.cos(radjusted * -1 - 90) * z *", "in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\",", "elif command == \"addForce\" and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s'", "drop_item(self, name, x_coord, y_coord, description=None): \"\"\" Creates an item and drops into into", "(j - 1) % column_length == 0: if len(current) > 0: vision.append(current) current", "in direction of top of agent, -y is bottom, +x is towards his", "\"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def", "reset_agent(self): \"\"\" Resets agent state back to a starting position (looking upward with", ":param port: :return: \"\"\" self.pagi_socket = None self.__ip_address = ip_address self.__port = port", "float :type __message_fragment: str :type __task_file: str :type message_stack: list \"\"\" def __init__(self,", "def __assert_open_socket(self): \"\"\" Make sure that we have an existing socket connection. 
If", "== code and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None else: return", "cnt < paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt += 1 def send_force(self,", "/ math.pi + 180 else: angle = math.acos(z / x) * 180 /", "of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21)", "touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y,", "math.cos(radjusted - 90) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) *", "math.cos(radjusted - 180) * z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny,", "get_position(self): \"\"\" Gets the position of the hand relative to the agent :return:", "rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the agent some number of degrees/radians. If", "elif adjusted == 270 or adjusted == -90: return (-1 * z), 0", "- 180) * z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2))", "\"\"\" Returns a list of ?x? points which contains all of his detailed", "= math.fabs(y) hyp = math.sqrt(ax ** 2 + ay ** 2) angle =", ":param n: :return: \"\"\" if description is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\"", "get_detailed_vision(self): \"\"\" Returns a list of ?x? points which contains all of his", ":return: list of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return", "\"LHR\"] # pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address: str", "math.pi / 180 if adjusted == 0: return 0, z elif adjusted ==", "y == 0: if x > 0: angle = 270 else: angle =", "and jump. 
He will only be able to if his bottom edge is", "pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world =", "message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets messages from the", "absolute is False, he'll be looking to the right at 270 degrees 0", "bottom is touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return", "looking. Therefore, if he's looking down at 180 degrees, and we tell him", "is looking upward) :param degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response =", "world left. :param x: :type x: float :param y: :type y: float :param", "and not direction.upper() == 'L' \\ and not direction.upper() == 'RIGHT' and not", "vector of force to the hand moving it :param x: :type x: float", "coordinates of the agent in the world :return: tuple(float, float) of coordinates of", "socket to PAGIWorld and then reset internal variables (in case we just use", "Returns a list of ?x? points which contains all of his detailed vision", "response: :param column_length: :return: \"\"\" vision = list() current = list() for j", "self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)] == code and response[len(code)]", "math.acos(z / y) * 180 / math.pi else: angle = math.acos(z / x)", "x_coord, y_coord, description=None): \"\"\" Creates an item and drops into into PAGIworld. These", "\"\"\" vision = list() current = list() for j in range(1, len(response)): if", "width of his body) to either the left or right. 
:param paces: :type", ":return: \"\"\" x = float(x) y = float(y) if not absolute or (x", "r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand:", "message '%s'\" % message) end = message[len(command)+1:].find(\",\") if end == -1: secondary =", "a list of lists where each inner list is the length of specified", "\"\" or (response[:len(code)] == code and response[len(code)] == \",\"): break else: self.message_stack.append(response) if", "== \"\" or self.__task_file is None: raise RuntimeError(\"Cannot reset task, no previous task", "text): \"\"\" Print text to the PAGIworld console window. :param text: :type text:", "else: rotation = self.get_rotation() if x != 0 and y != 0: ax", "could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self):", "of degrees/radians. If absolute is True, then we rotate to position specified from", "is world bottom, +x is world right and -x is world left. :param", "the agent's hands to the center of his body :return: \"\"\" raise NotImplementedError", "False, then vectors are relative to the direction agent is looking, thus +y", "\"\"\" Returns a list of all states that are currently in PAGIworld. :return:", "self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a task in PAGIworld. We additionally save", "message: :type message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK: command =", "command = message[:message.find(\",\")] if command == \"\" or command not in VALID_COMMANDS: raise", "inner list is the length of specified column_length. 
:param response: :param column_length: :return:", "be looking to the right at 270 degrees 0 90 agent 270 180", "he's looking down at 180 degrees, and we tell him to rotate 90", "= pagi_world def get_position(self): \"\"\" Gets the position of the hand relative to", ":type __message_fragment: str :type __task_file: str :type message_stack: list \"\"\" def __init__(self, ip_address=\"\",", ":param response: :param column_length: :return: \"\"\" vision = list() current = list() for", "loaded, then a RuntimeError will be raised. :raises: RuntimeError \"\"\" if self.__task_file ==", "val: :type val: float :param degrees: :type degrees: bool :param absolute: :type absolute:", "list() current = list() for j in range(1, len(response)): if (j - 1)", "= timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.pagi_socket =", "otherwise return the first matching message with that code, saving all other messages", "send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a vector force to the agent to", "length): \"\"\" Set a state within PAGIworld. :param name: :type name: str :param", "raise exception :param direction: :return: \"\"\" if not direction.upper() == 'R' and not", "both of the agent's hands to the center of his body :return: \"\"\"", "self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks that the given direction is either", "* 180 / math.pi + 90 else: if y < 0: angle =", ":type paces: int :param direction: :type direction: str :return: \"\"\" assert_left_or_right(direction) val =", "* -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 else: nx", "the message is a valid action type, as well verify that if the", "name: str :param length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length))", "y < 0: angle = math.acos(z / y) * 180 / math.pi +", "body. 
If absolute is False, then vectors are relative to the direction agent", "rotation in either degrees (0 - 359) or radians (0 - 2*pi) of", "270 adjusted = rotation - angle radjusted = adjusted * math.pi / 180", "\"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and response != \"\":", "x = float(x) y = float(y) if not absolute or (x == 0", ":return: bool True if agent has jumped (his bottom is touching something solid)", "absolute: :type absolute: bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x,", "+y is world up, -y is world bottom, +x is world right and", "- 359) or radians (0 - 2*pi) of agent (0 is looking upward)", "and then reset internal variables (in case we just use connect directly without", "r, e, k, degrees=True): \"\"\" Creates a new item in PAGIworld with the", "self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\")", "without creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure", "of force to the hand moving it :param x: :type x: float :param", "ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted < 270:", "** 2) angle = math.acos(ay / hyp) z = math.sin(angle) * ay else:", "(secondary, message)) # all messages must end with \\n if message[-1] != \"\\n\":", "0 elif y == 0: if x > 0: angle = 270 else:", "ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted < -270:", "!= \"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\")", "rotation back to 0 degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self,", "are currently in PAGIworld. 
:return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:]", "rotation %= 360 if degrees: rotation = rotation * 180 / math.pi return", "% task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets the", "the agent to move his body. If absolute is False, then vectors are", "ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port self.__timeout", "to move the agent some number of paces (defined as one width of", ":return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list of", "These items are the ones pre-built into PAGIworld. :param name: :param x: :param", "e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand", "def set_reflex(self, name, conditions, actions=None): \"\"\" Sets a reflex in PAGIworld to be", "'%s' in message '%s'\" % (secondary, message)) # all messages must end with", "float(y) if not absolute or (x == 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\"", "agent (0 is looking upward) :param degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\")", "y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x,", "looking to the left at 90 degrees and if absolute is False, he'll", "to open a new socket connection\") def send_message(self, message): \"\"\" Send a message", "conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes", ":return: tuple(float, float) of coordinates of 
agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return", "i in range(0, 16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j))", "- 90) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1", "def load_task(self, task_file): \"\"\" Loads a task in PAGIworld. We additionally save the", "is world right and -x is world left. :param x: :type x: float", "= math.acos(z / y) * 180 / math.pi + 180 else: angle =", "__assert_open_socket(self): \"\"\" Make sure that we have an existing socket connection. If we", "PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns", "def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the agent some number of degrees/radians.", "(secondary, message)) elif command == \"addForce\" and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid", "\"\"\" self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if command == \"\" or command", "PAGIworld API \"\"\" __author__ = \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\" __credits__", "= message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command) + 1] if command ==", "m: :param ph: :param r: :param e: :param k: :param degrees: :return: \"\"\"", "= math.fabs(x) else: z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation)", "message_index = self.__message_fragment.find(\"\\n\") if message_index == -1: break else: response = self.__message_fragment[:message_index] self.__message_fragment", "Returns a list of all states that are currently in PAGIworld. 
:return: list", "raise RuntimeError(\"Invalid sensor '%s' in message '%s'\" % (secondary, message)) elif command ==", ":param x: :param y: :param m: :param ph: :param r: :param e: :param", "True if agent has jumped (his bottom is touching something solid) otherwise False", "return rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts to move the agent some", "the right at 270 degrees 0 90 agent 270 180 :param val: :type", "270) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx, ny", "def center_hands(self): \"\"\" Moves both of the agent's hands to the center of", "left of the vision field with vision[10][15] representing upper right :return: list of", "def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a vector force to the agent", "first matching message with that code, saving all other messages to a stack.", "is None: raise RuntimeError(\"No open socket. Use connect() to open a new socket", "(x == 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation", "name, image_file, x, y, m, ph, r, e, k, degrees=True): \"\"\" Creates a", "\"\"\" \"Removes\" states from PAGIworld by just setting it's duration to zero (so", "int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\"", "float) of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2])", "os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was not found\" % task_file) self.__task_file =", "the socket, otherwise return the first matching message with that code, saving all", "self.get_rotation() if x != 0 and y != 0: ax = math.fabs(x) ay", "adjusted < 180: nx = math.cos(radjusted - 90) * z ny = math.sqrt(math.pow(z,", "paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt += 
1 def send_force(self, x=0, y=0,", "for j in range(1, len(response)): if (j - 1) % column_length == 0:", "y, m, ph, r, e, k, degrees=True): \"\"\" Creates a new item in", ":param port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname())", "(i, j)) for i in range(0, 16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\"", "(looking up), otherwise rotate him relative to where he's looking. Therefore, if he's", "\"\"\" Attempts to move the agent some number of paces (defined as one", "one width of his body) to either the left or right. :param paces:", "break else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\" or", "* -1 elif adjusted < -180: nx = math.cos(radjusted * -1 - 90)", "disable=too-many-arguments def create_item(self, name, image_file, x, y, m, ph, r, e, k, degrees=True):", "360 if degrees: rotation = rotation * 180 / math.pi return rotation def", "doesn't return anything :param code: :type code: str :param block: :type block: bool", "to the PAGIworld console window. :param text: :type text: str :return: \"\"\" text", "a socket to the given :param ip: :param port: :return: :raises: ConnectionRefusedError \"\"\"", "if not direction.upper() == 'R' and not direction.upper() == 'L' \\ and not", "z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if adjusted", "if x != 0 and y != 0: ax = math.fabs(x) ay =", "will be raised. 
:return: :raises: RuntimeError \"\"\" if self.pagi_socket is None: raise RuntimeError(\"No", "something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1", "either degrees (0 - 359) or radians (0 - 2*pi) of agent (0", "\"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class PAGIWorld(object):", "socket. Use connect() to open a new socket connection\") def send_message(self, message): \"\"\"", "y, m, ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world:", "agent 270 180 :param val: :type val: float :param degrees: :type degrees: bool", "code and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None else: return self.message_stack.pop(0)", "touching something solid, otherwise he'll do nothing. :return: bool True if agent has", "1) % column_length == 0: if len(current) > 0: vision.append(current) current = list()", "agent in the world :return: tuple(float, float) of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\")", "nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 elif adjusted < -180:", "his periphal vision. 
vision[0][0] represents lower left of the vision field with vision[10][15]", "degrees (0 - 359) or radians (0 - 2*pi) of agent (0 is", "of 11 (rows) x 16 (columns) points which contains all of his periphal", "agent's rotation back to 0 degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True) def", "== 'RIGHT' and not direction.upper() == 'LEFT': raise ValueError(\"You can only use a", "return (-1 * z), 0 else: if adjusted > 0: if adjusted <", "of the hand relative to the agent :return: tupe(float, float) of the x,", "task file name so we can reset things if necessary :param task_file: :type", "= float(response[-1]) rotation %= 360 if degrees: rotation = rotation * 180 /", "tuple(float, float) of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]),", ":return: tupe(float, float) of the x, y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\"", "nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 else: nx = math.cos(radjusted", "== -1: break else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code ==", ":param absolute: :type absolute: bool :return: \"\"\" if not degrees: val = val", "left. :param x: :type x: float :param y: :type y: float :param absolute:", "down at 180 degrees, and we tell him to rotate 90 degrees, if", "paces (defined as one width of his body) to either the left or", "to the socket. We make sure that the message is a valid action", "\"\"\" if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port =", "PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise", "try and jump. 
He will only be able to if his bottom edge", "< -90: ny = math.cos(radjusted * -1) * z nx = math.sqrt(math.pow(z, 2)", "= math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx = math.cos(radjusted * -1 -", "21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in range(0, 16): for j in", "val * 180. / math.pi if absolute: val %= 360. val -= self.get_rotation()", ":type __ip_address: str :type __port: int :type __timeout: float :type __message_fragment: str :type", "valid sensor or action to prevent bad calls. :param message: :type message: str", "str :raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was", "math.pow(ny, 2)) * -1 elif adjusted < -180: nx = math.cos(radjusted * -1", ":type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world):", "list is the length of specified column_length. :param response: :param column_length: :return: \"\"\"", "in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message '%s'\" % (secondary, message)) #", "upward with hands in starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets", "of specified column_length. :param response: :param column_length: :return: \"\"\" vision = list() current", "bool :return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True", "set_reflex(self, name, conditions, actions=None): \"\"\" Sets a reflex in PAGIworld to be carried", "str :return: \"\"\" text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name,", "self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a task", "Causes the agent to try and jump. 
He will only be able to", "self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation = self.get_rotation() if x != 0 and", "him relative to where he's looking. Therefore, if he's looking down at 180", "\"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world def", "name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list of all states that are", "90 degrees and if absolute is False, he'll be looking to the right", "* -1 else: nx = math.cos(radjusted - 270) * z * -1 ny", "range(0, 11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\",", "existing socket connection. If we don't, exception will be raised. :return: :raises: RuntimeError", "def reset_rotation(self): \"\"\" Resets the agent's rotation back to 0 degrees (looking upward)", "if degrees: r = r * math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file,", "ny def get_position(self): \"\"\" Gets x/y coordinates of the agent in the world", "import os import socket import time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\",", "the socket. If code is blank, then we just return the first message", "socket. If code is blank, then we just return the first message from", "0: if adjusted < 90: ny = math.cos(radjusted) * z nx = math.sqrt(math.pow(z,", "break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\"", "rotation * 180 / math.pi return rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts", "adjusted = rotation - angle radjusted = adjusted * math.pi / 180 if", "* 1000), absolute=True) time.sleep(2) cnt += 1 def send_force(self, x=0, y=0, absolute=False): \"\"\"", "and -x is world left. 
:param x: :type x: float :param y: :type", "and there's no response from the socket, after self.__timeout seconds, function will raise", "task file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text to the PAGIworld", "2) - math.pow(nx, 2)) else: if adjusted < -90: ny = math.cos(radjusted *", "get_message(self, code=\"\", block=False): \"\"\" Gets messages from the socket. If code is blank,", "= list() for j in range(1, len(response)): if (j - 1) % column_length", ":type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You", "message from the stack if (1) the stack isn't empty and (2) either", "self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def load_task(self, task_file):", "agent has jumped (his bottom is touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\")", "actions: :return: \"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions))", "% (secondary, message)) # all messages must end with \\n if message[-1] !=", "2) - math.pow(ny, 2)) * -1 else: nx = math.cos(radjusted - 270) *", "return anything :param code: :type code: str :param block: :type block: bool :return:", "<reponame>RAIRLab/PAGIapi-python<gh_stars>0 \"\"\" Python PAGIworld API \"\"\" __author__ = \"<NAME>\" __copyright__ = \"Copyright 2015,", "= math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 elif adjusted < -180: nx", "self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y, absolute=False): \"\"\" Sends a vector of", "== 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation = self.get_rotation() if x !=", "make sure that the message is a valid action type, as well verify", ":param x: :type x: float :param y: :type y: float :param absolute: :type", "== code and 
response[len(code)] == \",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout)", "description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name, image_file, x, y, m, ph,", "description is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else:", ":param e: :param k: :param degrees: :return: \"\"\" if degrees: r = r", "or (x == 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else:", "= math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted < 270: ny", "contains all of his periphal vision. vision[0][0] represents lower left of the vision", "from 0 (looking up), otherwise rotate him relative to where he's looking. Therefore,", "def reset_agent(self): \"\"\" Resets agent state back to a starting position (looking upward", "releasing anything it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" %", "port, timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a", "it :param x: :type x: float :param y: :type y: float :param absolute:", "name): \"\"\" \"Removes\" states from PAGIworld by just setting it's duration to zero", "self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\" Sets a reflex in", "angle = 270 else: angle = 90 elif x < 0: if y", "a RuntimeError will be raised. :raises: RuntimeError \"\"\" if self.__task_file == \"\" or", "is touching something solid, otherwise he'll do nothing. :return: bool True if agent", "\"\"\" if self.pagi_socket is None: raise RuntimeError(\"No open socket. Use connect() to open", "math.acos(ay / hyp) z = math.sin(angle) * ay else: if x != 0:", "number of degrees/radians. 
If absolute is True, then we rotate to position specified", "= \"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\" import math", "\"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class", "-1 - 270) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return", "degrees and if absolute is False, he'll be looking to the right at", "str :param length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\")", ":type task_file: str :raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file at", "= 90 elif x < 0: if y > 0: angle = math.acos(z", "world :return: tuple(float, float) of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\")", "self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\" Attempts to", "math.sqrt(ax ** 2 + ay ** 2) angle = math.acos(ay / hyp) z", "of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16)", "adjusted == 90 or adjusted == -270: return z, 0 elif adjusted ==", "return reflexes[1:] def drop_item(self, name, x_coord, y_coord, description=None): \"\"\" Creates an item and", "math.pow(nx, 2)) else: if adjusted < -90: ny = math.cos(radjusted * -1) *", "with vision[10][15] representing upper right :return: list of size 11 x 16 \"\"\"", "direction[0].upper() == \"R\" else -1 cnt = 0 while cnt < paces: self.send_force(x=(val", "and simplify :param x: :param y: :param z: :param rotation: :return: \"\"\" if", "list of 11 (rows) x 16 (columns) points which contains all of his", "(name, x_coord, 
y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name, image_file, x,", "empty and (2) either code is blank or it matches something on the", "back to a starting position (looking upward with hands in starting position) :return:", "bool :param absolute: :type absolute: bool :return: \"\"\" if not degrees: val =", "- math.pow(nx, 2)) * -1 elif adjusted < 270: ny = math.cos(radjusted -", "not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in a valid PagiWorld variable to", ":return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y, absolute=False):", "in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for i in range(0, 31):", "and response != \"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index", "and response[len(code)] == \",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response", "set_state(self, name, length): \"\"\" Set a state within PAGIworld. 
:param name: :type name:", "if adjusted > 0: if adjusted < 90: ny = math.cos(radjusted) * z", "not direction.upper() == 'R' and not direction.upper() == 'L' \\ and not direction.upper()", "x: :param y: :param m: :param ph: :param r: :param e: :param k:", "-180: return 0, (-1 * z) elif adjusted == 90 or adjusted ==", "self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index == -1: break else:", "\"R\" else -1 cnt = 0 while cnt < paces: self.send_force(x=(val * 1000),", "starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's rotation back", "z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif", "we can reset things if necessary :param task_file: :type task_file: str :raises: FileNotFoundError", "val = 1 if direction[0].upper() == \"R\" else -1 cnt = 0 while", "self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list of all states that are currently", ":return: \"\"\" if description is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name,", "2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\" import math import os", "if description is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord))", "saving all other messages to a stack. 
If block is set to False,", "% name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list of all the active", "% self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\"", "0: z = math.fabs(x) else: z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y,", ":type text: str :return: \"\"\" text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def", "* 180 / math.pi else: angle = math.acos(z / x) * 180 /", "actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name,", "2)) return nx, ny def get_position(self): \"\"\" Gets x/y coordinates of the agent", "task, no previous task file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text", "* z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx", "% (name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld by", "really be in a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\")", "-270: return z, 0 elif adjusted == 270 or adjusted == -90: return", "x, y, absolute=False): \"\"\" Sends a vector of force to the hand moving", "Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\" import math import os import socket", "will be raised. 
:raises: RuntimeError \"\"\" if self.__task_file == \"\" or self.__task_file is", "position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's rotation back to", "if he's looking down at 180 degrees, and we tell him to rotate", "the message stack :param code: :return: str \"\"\" if len(self.message_stack) > 0: if", "self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord, y_coord, description=None): \"\"\" Creates an item", "only be able to if his bottom edge is touching something solid, otherwise", "else: if adjusted < -90: ny = math.cos(radjusted * -1) * z nx", "to the one that was loaded in self.load_task. If one wasn't loaded, then", "right :return: list of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\")", "self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld by just setting it's", "rotation): \"\"\" TODO: Finish and simplify :param x: :param y: :param z: :param", "center of his body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world:", "\"Removes\" states from PAGIworld by just setting it's duration to zero (so that", "while cnt < paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt += 1 def", "* 180. / math.pi if absolute: val %= 360. val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\"", "self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the", ":param direction: :return: \"\"\" if not direction.upper() == 'R' and not direction.upper() ==", "%= 360. 
val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\"", ":return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360 if", "def release(self): \"\"\" Opens the hand, releasing anything it could be holding :return:", "\"\"\" Rotate the agent some number of degrees/radians. If absolute is True, then", "180) * z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) *", "1000), absolute=True) time.sleep(2) cnt += 1 def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends", "(0 - 359) or radians (0 - 2*pi) of agent (0 is looking", "state back to a starting position (looking upward with hands in starting position)", "NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand)", "== \"\" or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the", "and drops into into PAGIworld. 
These items are the ones pre-built into PAGIworld.", "math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx,", "for a sensor or action, that it's a valid sensor or action to", "\"\"\" self.pagi_socket = None self.__ip_address = ip_address self.__port = port self.__timeout = timeout", "VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in range(0, 16): for j in range(0,", "be looking to the left at 90 degrees and if absolute is False,", "= PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the agent to try and jump.", "command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the message '%s'\" %", "180 :param val: :type val: float :param degrees: :type degrees: bool :param absolute:", "z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 elif adjusted <", "elif adjusted == 90 or adjusted == -270: return z, 0 elif adjusted", "VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\",", "\",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code):", "= math.cos(radjusted * -1 - 270) * z ny = math.sqrt(math.pow(z, 2) -", "each inner list is the length of specified column_length. 
:param response: :param column_length:", "type, as well verify that if the message is for a sensor or", "else: z = math.fabs(y) nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny)", "conditions: :param actions: :return: \"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name,", ":type name: str :param length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name,", "list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param port: :return:", "% (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") #", "/ x) * 180 / math.pi + 90 else: if y < 0:", "pagi_world) def jump(self): \"\"\" Causes the agent to try and jump. He will", "socket, after self.__timeout seconds, function will raise socket.timeout exception. If block is set", "float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list of 11 (rows) x 16 (columns)", "which contains all of his periphal vision. 
vision[0][0] represents lower left of the", "(his bottom is touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\")", "and if absolute is False, he'll be looking to the right at 270", "the response into a list of lists where each inner list is the", "j in range(1, len(response)): if (j - 1) % column_length == 0: if", "socket import time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\",", "0: return 0, z elif adjusted == 180 or adjusted == -180: return", "the specified properties :param name: :param image_file: :param x: :param y: :param m:", "file at '%s' was not found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" %", "timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET,", "== 'L' \\ and not direction.upper() == 'RIGHT' and not direction.upper() == 'LEFT':", "\"\"\" :param ip: :param port: :return: \"\"\" self.pagi_socket = None self.__ip_address = ip_address", "ValueError(\"You must pass in a valid PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world", "degrees=True, absolute=False): \"\"\" Rotate the agent some number of degrees/radians. If absolute is", "[\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" %", "hands in starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's", "def jump(self): \"\"\" Causes the agent to try and jump. He will only", "list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close", "degrees/radians. 
If absolute is True, then we rotate to position specified from 0", "PAGIWorld): raise ValueError(\"You must pass in a valid PagiWorld variable to PagiAgent\") self.pagi_world", "math.cos(radjusted - 270) * z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx,", "nx = math.cos(radjusted - 90) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx,", ":type direction: str :return: \"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper() == \"R\"", "\"A\", \"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i)", "socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and response !=", "is blank, then we just return the first message from the socket, otherwise", "\"\"\" Loads a task in PAGIworld. We additionally save the task file name", "float) of the x, y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand)", "jump(self): \"\"\" Causes the agent to try and jump. He will only be", "= self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list of 11", "text to the PAGIworld console window. :param text: :type text: str :return: \"\"\"", "relative to where he's looking. Therefore, if he's looking down at 180 degrees,", "= math.cos(radjusted * -1 - 180) * z * -1 nx = math.sqrt(math.pow(z,", "task_file): \"\"\" Loads a task in PAGIworld. We additionally save the task file", ":param name: :param conditions: :param actions: :return: \"\"\" if actions is not None:", "None: raise RuntimeError(\"No open socket. 
Use connect() to open a new socket connection\")", "pagi_socket: socket.socket :type __ip_address: str :type __port: int :type __timeout: float :type __message_fragment:", "- math.pow(ny, 2)) * -1 elif adjusted < -180: nx = math.cos(radjusted *", "name, x_coord, y_coord, description=None): \"\"\" Creates an item and drops into into PAGIworld.", ":type degrees: bool :param absolute: :type absolute: bool :return: \"\"\" if not degrees:", "math.pi if absolute: val %= 360. val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\")", ":type x: float :param y: :type y: float :param absolute: :type absolute: bool", "\"\"\" Opens the hand, releasing anything it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\"", "state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns", ":param absolute: :type absolute: bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand,", "states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\" Sets a reflex in PAGIworld to", "= adjusted * math.pi / 180 if adjusted == 0: return 0, z", ":param z: :param rotation: :return: \"\"\" if x == 0: if y <", "float(response[1]), float(response[2]) def release(self): \"\"\" Opens the hand, releasing anything it could be", "method to process returned vision repsonse. 
Splits the response into a list of", "\"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def", "field with vision[10][15] representing upper right :return: list of size 11 x 16", "and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation = self.get_rotation() if", "\",\": return self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads", "+ ay ** 2) angle = math.acos(ay / hyp) z = math.sin(angle) *", "self.hand) def grab(self): \"\"\" Closes the hand, grabbing anything it is touching :return:", "self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes the hand, grabbing", "/ 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m, ph, r, e, k))", "nx, ny def get_position(self): \"\"\" Gets x/y coordinates of the agent in the", "bool True if agent has jumped (his bottom is touching something solid) otherwise", "/ math.pi + 270 adjusted = rotation - angle radjusted = adjusted *", "and not direction.upper() == 'RIGHT' and not direction.upper() == 'LEFT': raise ValueError(\"You can", "bottom, +x is towards his right side, -x is his left side. If", "the left or right. 
:param paces: :type paces: int :param direction: :type direction:", "= math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted", "\"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a", "self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create", "raise RuntimeError(\"Invalid force '%s' in message '%s'\" % (secondary, message)) # all messages", ":return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord,", "\"\"\" Close the socket to PAGIWorld and then reset internal variables (in case", "function will raise socket.timeout exception. If block is set to true, no exception", ":return: \"\"\" if not direction.upper() == 'R' and not direction.upper() == 'L' \\", "% message) end = message[len(command)+1:].find(\",\") if end == -1: secondary = message[len(command)+1:] else:", "FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was not found\"", "list() for j in range(1, len(response)): if (j - 1) % column_length ==", ":param name: :param image_file: :param x: :param y: :param m: :param ph: :param", "window. 
:param text: :type text: str :return: \"\"\" text = str(text) self.send_message(\"print,%s\" %", "If absolute is true, then vector +y is world up, -y is world", "jumped (his bottom is touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response =", "__message_fragment: str :type __task_file: str :type message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209,", "message_index == -1: break else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code", "math.acos(z / x) * 180 / math.pi + 270 adjusted = rotation -", "right, and if it isn't, raise exception :param direction: :return: \"\"\" if not", "is set to False, and there's no response from the socket, after self.__timeout", "= PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket to the", "-x is his left side. If absolute is true, then vector +y is", "== \"\" or (response[:len(code)] == code and response[len(code)] == \",\"): break else: self.message_stack.append(response)", "as well verify that if the message is for a sensor or action,", "pagi_world def get_position(self): \"\"\" Gets the position of the hand relative to the", "VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the message '%s'\" % message) end =", "= math.acos(z / y) * 180 / math.pi else: angle = math.acos(z /", "\"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address:", "return nx, ny def get_position(self): \"\"\" Gets x/y coordinates of the agent in", "exception :param direction: :return: \"\"\" if not direction.upper() == 'R' and not direction.upper()", "math import os import socket import time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\",", "\"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class 
PAGIWorld(object): \"\"\" :type pagi_socket:", "list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions, actions=None):", "y = float(y) if not absolute or (x == 0 and y ==", "grabbing anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand)", "anything :param code: :type code: str :param block: :type block: bool :return: :raises:", "* -1 - 270) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2))", "vision repsonse. Splits the response into a list of lists where each inner", "= task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets the task to the", "% (secondary, message)) elif command == \"addForce\" and secondary not in VALID_FORCES: raise", "file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text to the PAGIworld console", "the x, y coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response =", "- 90) * z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2))", "If we don't, exception will be raised. :return: :raises: RuntimeError \"\"\" if self.pagi_socket", ":type code: str :param block: :type block: bool :return: :raises: socket.timeout \"\"\" if", "calls. :param message: :type message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if ERROR_CHECK:", "Close the socket to PAGIWorld and then reset internal variables (in case we", "hyp = math.sqrt(ax ** 2 + ay ** 2) angle = math.acos(ay /", "self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type pagi_world: PAGIWorld :type left_hand: PAGIAgentHand :type right_hand:", "angle = math.acos(ay / hyp) z = math.sin(angle) * ay else: if x", "text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set a state within PAGIworld. 
:param", "vectors are relative to the direction agent is looking, thus +y is always", "PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def", "or right, and if it isn't, raise exception :param direction: :return: \"\"\" if", "degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %=", "str :return: \"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper() == \"R\" else -1", "name): \"\"\" Removes a reflex completely from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\"", "in self.load_task. If one wasn't loaded, then a RuntimeError will be raised. :raises:", "PAGIWorld and then reset internal variables (in case we just use connect directly", "zero (so that can't ever really be in a state) :param name: :return:", "\"\"\" if description is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord,", "self.hand) def assert_left_or_right(direction): \"\"\" Checks that the given direction is either left or", "self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's rotation back to 0 degrees (looking", "Gets x/y coordinates of the agent in the world :return: tuple(float, float) of", "(name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\"", "* -1) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1", "= self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self): \"\"\" Resets agent state back", "will stop in this function if socket doesn't return anything :param code: :type", "- math.pow(ny, 2)) * -1 else: nx = math.cos(radjusted - 270) * z", "the socket, 
after self.__timeout seconds, function will raise socket.timeout exception. If block is", "message[:message.find(\",\")] if command == \"\" or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid command", "is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x,", "360. val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns", "self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the", "* 180 / math.pi + 180 else: angle = math.acos(z / x) *", "-1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx = math.cos(radjusted *", "degrees, if absolute is True, he'll be looking to the left at 90", "given :param ip: :param port: :return: :raises: ConnectionRefusedError \"\"\" if ip_address == \"\":", ":return: \"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper() == \"R\" else -1 cnt", "x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments", "no exception will be thrown, but program will stop in this function if", "a new item in PAGIworld with the specified properties :param name: :param image_file:", "* -1 - 180) * z * -1 nx = math.sqrt(math.pow(z, 2) -", "or action to prevent bad calls. 
:param message: :type message: str :return: :raises:", "of the vision field with vision[10][15] representing upper right :return: list of size", "list() current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\" Moves both of the agent's", "__init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\"", "the position of the hand relative to the agent :return: tupe(float, float) of", "\"\"\" if self.__task_file == \"\" or self.__task_file is None: raise RuntimeError(\"Cannot reset task,", "socket doesn't return anything :param code: :type code: str :param block: :type block:", "the left at 90 degrees and if absolute is False, he'll be looking", "= 1 if direction[0].upper() == \"R\" else -1 cnt = 0 while cnt", "nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted < 180: nx =", "Print text to the PAGIworld console window. :param text: :type text: str :return:", "to try and jump. He will only be able to if his bottom", "new socket connection\") def send_message(self, message): \"\"\" Send a message to the socket.", "= math.cos(radjusted - 90) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2))", "\"\"\" Creates an item and drops into into PAGIworld. These items are the", "self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list of all the", "y: float :param absolute: :type absolute: bool :return: \"\"\" if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\"", "if necessary :param task_file: :type task_file: str :raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file):", "to the hand moving it :param x: :type x: float :param y: :type", ":param degrees: :type degrees: bool :param absolute: :type absolute: bool :return: \"\"\" if", "self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print text to the PAGIworld console window. 
:param", "rotate him relative to where he's looking. Therefore, if he's looking down at", "# pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address: str :type", "0: ax = math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax ** 2 +", "all of his periphal vision. vision[0][0] represents lower left of the vision field", "x: :param y: :param z: :param rotation: :return: \"\"\" if x == 0:", "reset task, no previous task file found\") self.load_task(self.__task_file) def print_text(self, text): \"\"\" Print", "if x > 0: angle = 270 else: angle = 90 elif x", ":param degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation =", "a valid sensor or action to prevent bad calls. :param message: :type message:", "remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld by just setting it's duration to", "self.__task_file is None: raise RuntimeError(\"Cannot reset task, no previous task file found\") self.load_task(self.__task_file)", "n: :return: \"\"\" if description is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" %", "class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand", "-270: ny = math.cos(radjusted * -1 - 180) * z * -1 nx", "adjusted == -180: return 0, (-1 * z) elif adjusted == 90 or", "* z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if", "to the agent to move his body. 
If absolute is False, then vectors", "y: :param n: :return: \"\"\" if description is None or description == \"\":", "with hands in starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the", "math.sin(angle) * ay else: if x != 0: z = math.fabs(x) else: z", "of paces (defined as one width of his body) to either the left", "angle = 180 else: angle = 0 elif y == 0: if x", "socket.timeout exception. If block is set to true, no exception will be thrown,", "in PAGIworld. :return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self,", "degrees: rotation = rotation * 180 / math.pi return rotation def move_paces(self, paces,", "and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def", "= \"\" self.message_stack = list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def connect(self,", "degrees: r = r * math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x,", "\"\"\" Send a message to the socket. We make sure that the message", "with that code, saving all other messages to a stack. If block is", "True and response != \"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode()", "if (1) the stack isn't empty and (2) either code is blank or", "\"\"\" if x == 0: if y < 0: angle = 180 else:", "str \"\"\" if len(self.message_stack) > 0: if code != \"\": for index in", "code: str :param block: :type block: bool :return: :raises: socket.timeout \"\"\" if block:", "have an existing socket connection. If we don't, exception will be raised. 
:return:", "self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y, absolute=False): \"\"\" Sends", "'%s'\" % message) end = message[len(command)+1:].find(\",\") if end == -1: secondary = message[len(command)+1:]", "r = r * math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y,", "math.pi + 90 else: if y < 0: angle = math.acos(z / y)", "messages must end with \\n if message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode())", "/ 180 if adjusted == 0: return 0, z elif adjusted == 180", "socket.socket :type __ip_address: str :type __port: int :type __timeout: float :type __message_fragment: str", "get_periphal_vision(self): \"\"\" Returns a list of 11 (rows) x 16 (columns) points which", "in VALID_COMMANDS: raise RuntimeError(\"Invalid command found in the message '%s'\" % message) end", "that we have an existing socket connection. If we don't, exception will be", "def grab(self): \"\"\" Closes the hand, grabbing anything it is touching :return: \"\"\"", "by just setting it's duration to zero (so that can't ever really be", "%= 360 if degrees: rotation = rotation * 180 / math.pi return rotation", "adjusted < 90: ny = math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2) -", "== 180 or adjusted == -180: return 0, (-1 * z) elif adjusted", "move the agent some number of paces (defined as one width of his", ":type left_hand: PAGIAgentHand :type right_hand: PAGIAgentHand \"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world,", "edge is touching something solid, otherwise he'll do nothing. 
:return: bool True if", "== 90 or adjusted == -270: return z, 0 elif adjusted == 270", "isn't, raise exception :param direction: :return: \"\"\" if not direction.upper() == 'R' and", "the socket to PAGIWorld and then reset internal variables (in case we just", "raise RuntimeError(\"Cannot reset task, no previous task file found\") self.load_task(self.__task_file) def print_text(self, text):", "it isn't, raise exception :param direction: :return: \"\"\" if not direction.upper() == 'R'", "+ 1] if command == \"sensorRequest\" and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid", "\"\" or self.__task_file is None: raise RuntimeError(\"Cannot reset task, no previous task file", "get_all_reflexes(self): \"\"\" Returns a list of all the active reflexes in PAGIworld :return:", "31): for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in", "agent state back to a starting position (looking upward with hands in starting", "= \"\" self.__task_file = \"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address,", ":return: \"\"\" if not degrees: val = val * 180. / math.pi if", "\"sensorRequest\" and secondary not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message '%s'\"", ":type pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world", "duration to zero (so that can't ever really be in a state) :param", "new item in PAGIworld with the specified properties :param name: :param image_file: :param", "= \"\" self.__task_file = \"\" self.message_stack = list() self.connect(ip_address, port, timeout) self.agent =", "If one wasn't loaded, then a RuntimeError will be raised. 
:raises: RuntimeError \"\"\"", "degrees: :type degrees: bool :param absolute: :type absolute: bool :return: \"\"\" if not", "else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self,", "where he's looking. Therefore, if he's looking down at 180 degrees, and we", "socket connection\") def send_message(self, message): \"\"\" Send a message to the socket. We", "math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx = math.cos(radjusted * -1 - 270)", "= self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)] == code and response[len(code)] ==", "% name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list of all states that", "2 + ay ** 2) angle = math.acos(ay / hyp) z = math.sin(angle)", "self.__timeout = timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.pagi_socket", ":param text: :type text: str :return: \"\"\" text = str(text) self.send_message(\"print,%s\" % text)", "z elif adjusted == 180 or adjusted == -180: return 0, (-1 *", "force to the agent to move his body. 
If absolute is False, then", "found in the message '%s'\" % message) end = message[len(command)+1:].find(\",\") if end ==", "self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and response != \"\": while \"\\n\" not", ":param k: :param degrees: :return: \"\"\" if degrees: r = r * math.pi", "socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket to PAGIWorld", "PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the agent to", ":param x: :param y: :param z: :param rotation: :return: \"\"\" if x ==", "nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx = math.cos(radjusted * -1", "\"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a", "= \"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout)", "if (j - 1) % column_length == 0: if len(current) > 0: vision.append(current)", "the task to the one that was loaded in self.load_task. 
If one wasn't", "180: nx = math.cos(radjusted - 90) * z ny = math.sqrt(math.pow(z, 2) -", "== 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation =", "self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment = \"\" self.__task_file", "the message is for a sensor or action, that it's a valid sensor", "return 0, (-1 * z) elif adjusted == 90 or adjusted == -270:", "rotation - angle radjusted = adjusted * math.pi / 180 if adjusted ==", "from the socket, after self.__timeout seconds, function will raise socket.timeout exception. If block", "def move_paces(self, paces, direction='L'): \"\"\" Attempts to move the agent some number of", "and not direction.upper() == 'LEFT': raise ValueError(\"You can only use a L or", "text: str :return: \"\"\" text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self,", "if absolute: val %= 360. val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def", "otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) == 1 def reset_agent(self):", "- math.pow(nx, 2)) return nx, ny def get_position(self): \"\"\" Gets x/y coordinates of", "active reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:]", ":return: str \"\"\" if len(self.message_stack) > 0: if code != \"\": for index", "__copyright__ = \"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\" import", "def get_detailed_vision(self): \"\"\" Returns a list of ?x? 
points which contains all of", "180) * z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else:", "and we tell him to rotate 90 degrees, if absolute is True, he'll", ":type block: bool :return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code)", "in range(0, 31): for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for", "= math.cos(radjusted - 270) * z * -1 ny = math.sqrt(math.pow(z, 2) -", "270 else: angle = 90 elif x < 0: if y > 0:", "PAGIWorld \"\"\" def __init__(self, hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world", "x == 0: if y < 0: angle = 180 else: angle =", "Sends a vector of force to the hand moving it :param x: :type", "TODO: Finish and simplify :param x: :param y: :param z: :param rotation: :return:", "vision. vision[0][0] represents lower left of the vision field with vision[10][15] representing upper", "sensor or action, that it's a valid sensor or action to prevent bad", "- math.pow(nx, 2)) * -1 elif adjusted < -270: ny = math.cos(radjusted *", ":param name: :param x: :param y: :param n: :return: \"\"\" if description is", "- math.pow(ny, 2)) else: nx = math.cos(radjusted * -1 - 270) * z", "is his left side. If absolute is true, then vector +y is world", "always in direction of top of agent, -y is bottom, +x is towards", "\"\"\" x = float(x) y = float(y) if not absolute or (x ==", "task_file) def reset_task(self): \"\"\" Resets the task to the one that was loaded", "/ math.pi return rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts to move the", "RuntimeError will be raised. :raises: RuntimeError \"\"\" if self.__task_file == \"\" or self.__task_file", "up), otherwise rotate him relative to where he's looking. 
Therefore, if he's looking", "paces: :type paces: int :param direction: :type direction: str :return: \"\"\" assert_left_or_right(direction) val", "= str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set a", "-1 elif adjusted < -270: ny = math.cos(radjusted * -1 - 180) *", "import math import os import socket import time ERROR_CHECK = True VALID_COMMANDS =", ":return: :raises: ConnectionRefusedError \"\"\" if ip_address == \"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address =", "hyp) z = math.sin(angle) * ay else: if x != 0: z =", "VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message '%s'\" % (secondary, message)) elif command", "a valid PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world)", "\"Copyright 2015, RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\" import math import", "os import socket import time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\",", "RuntimeError \"\"\" if self.pagi_socket is None: raise RuntimeError(\"No open socket. Use connect() to", "ny = math.cos(radjusted - 180) * z * -1 nx = math.sqrt(math.pow(z, 2)", "angle = 90 elif x < 0: if y > 0: angle =", "[\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\",", "sure that the message is a valid action type, as well verify that", "else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks that the given direction", "the one that was loaded in self.load_task. 
If one wasn't loaded, then a", "directly without creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make", "float :param degrees: :type degrees: bool :param absolute: :type absolute: bool :return: \"\"\"", "self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a list", ":return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that we have an existing", "+ 270 adjusted = rotation - angle radjusted = adjusted * math.pi /", "it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def", "PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that we have", "if not absolute or (x == 0 and y == 0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" %", "pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the agent to try", "either left or right, and if it isn't, raise exception :param direction: :return:", "direction: :type direction: str :return: \"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper() ==", "upward) :param degrees: :type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation", "Creates a new item in PAGIworld with the specified properties :param name: :param", "PAGIworld by just setting it's duration to zero (so that can't ever really", "well verify that if the message is for a sensor or action, that", "code == \"\" or (response[:len(code)] == code and response[len(code)] == \",\"): break else:", "180. / math.pi if absolute: val %= 360. 
val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" %", "a valid action type, as well verify that if the message is for", "r: :param e: :param k: :param degrees: :return: \"\"\" if degrees: r =", "all of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response,", "stack. If block is set to False, and there's no response from the", "2)) else: nx = math.cos(radjusted * -1 - 270) * z ny =", "\"\"\" def __init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in", "else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\" Attempts", "points which contains all of his periphal vision. vision[0][0] represents lower left of", "description=None): \"\"\" Creates an item and drops into into PAGIworld. These items are", "VALID_SENSORS.append(\"R%d\" % i) for i in range(0, 31): for j in range(0, 21):", "math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if adjusted < -90: ny = math.cos(radjusted", "(2) either code is blank or it matches something on the message stack", "= PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the agent", "__init__(self, pagi_world): if not isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in a valid", "PAGIAgentHand('r', pagi_world) def jump(self): \"\"\" Causes the agent to try and jump. 
He", "range(1, len(response)): if (j - 1) % column_length == 0: if len(current) >", "end = message[len(command)+1:].find(\",\") if end == -1: secondary = message[len(command)+1:] else: secondary =", "if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" %", "0: angle = math.acos(z / y) * 180 / math.pi + 180 else:", "hand relative to the agent :return: tupe(float, float) of the x, y coordinates", "\"\"\" if not direction.upper() == 'R' and not direction.upper() == 'L' \\ and", "is the length of specified column_length. :param response: :param column_length: :return: \"\"\" vision", "true, no exception will be thrown, but program will stop in this function", "= list() current.append(response[j]) vision.append(current) return vision def center_hands(self): \"\"\" Moves both of the", "is None or description == \"\": self.send_message(\"dropItem,%s,%f,%f\" % (name, x_coord, y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\"", "* z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted", "isn't empty and (2) either code is blank or it matches something on", "__get_message_from_stack(self, code): \"\"\" Attempts to return a message from the stack if (1)", "(rows) x 16 (columns) points which contains all of his periphal vision. vision[0][0]", "rotation: :return: \"\"\" if x == 0: if y < 0: angle =", "> 0: if adjusted < 90: ny = math.cos(radjusted) * z nx =", "\"\"\" Causes the agent to try and jump. He will only be able", "val %= 360. 
val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True):", "vision[0][0] represents lower left of the vision field with vision[10][15] representing upper right", "% self.hand) def assert_left_or_right(direction): \"\"\" Checks that the given direction is either left", "direction.upper() == 'R' and not direction.upper() == 'L' \\ and not direction.upper() ==", "\"RP\", \"A\", \"MDN\", \"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" %", "verify that if the message is for a sensor or action, that it's", "reset_task(self): \"\"\" Resets the task to the one that was loaded in self.load_task.", "one wasn't loaded, then a RuntimeError will be raised. :raises: RuntimeError \"\"\" if", "to PAGIWorld and then reset internal variables (in case we just use connect", "else: secondary = message[len(command)+1:end + len(command) + 1] if command == \"sensorRequest\" and", "len(self.message_stack) > 0: if code != \"\": for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)]", "else: angle = 90 elif x < 0: if y > 0: angle", "his body) to either the left or right. :param paces: :type paces: int", "\"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y, absolute=False): \"\"\"", ":return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the agent", "anything it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" % self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand)", "it's duration to zero (so that can't ever really be in a state)", "agent to try and jump. 
He will only be able to if his", "= [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\",", "VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\",", "self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a reflex completely", "raise socket.timeout exception. If block is set to true, no exception will be", "him to rotate 90 degrees, if absolute is True, he'll be looking to", "< -180: nx = math.cos(radjusted * -1 - 90) * z * -1", "self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name,", "= rotation * 180 / math.pi return rotation def move_paces(self, paces, direction='L'): \"\"\"", "connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket to the given :param ip:", ":type degrees: bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation", "if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None", "\"\"\" Sends a vector force to the agent to move his body. If", "== 0: if y < 0: angle = 180 else: angle = 0", "matching message with that code, saving all other messages to a stack. If", "absolute=True) time.sleep(2) cnt += 1 def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a", ":raises: RuntimeError \"\"\" if self.__task_file == \"\" or self.__task_file is None: raise RuntimeError(\"Cannot", "a task in PAGIworld. We additionally save the task file name so we", "ever really be in a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name)", "list of ?x? 
points which contains all of his detailed vision :return: \"\"\"", "message is for a sensor or action, that it's a valid sensor or", "return states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\" Sets a reflex in PAGIworld", "left side. If absolute is true, then vector +y is world up, -y", "to rotate 90 degrees, if absolute is True, he'll be looking to the", "message_stack: list \"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param port:", "self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360 if degrees: rotation", "(-1 * z), 0 else: if adjusted > 0: if adjusted < 90:", "is bottom, +x is towards his right side, -x is his left side.", "= float(x) y = float(y) if not absolute or (x == 0 and", "the world :return: tuple(float, float) of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response =", "raised. :return: :raises: RuntimeError \"\"\" if self.pagi_socket is None: raise RuntimeError(\"No open socket.", "def remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld by just setting it's duration", "name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a list", "RAIR Lab\" __credits__ = [\"<NAME>\"] __license__ = \"MIT\" import math import os import", "into PAGIworld. These items are the ones pre-built into PAGIworld. :param name: :param", "y: float :param absolute: :type absolute: bool :return: \"\"\" x = float(x) y", "math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx, ny def get_position(self): \"\"\" Gets x/y", "Moves both of the agent's hands to the center of his body :return:", "% (i, j)) for i in range(0, 16): for j in range(0, 11):", "z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) else: nx =", "= val * 180. / math.pi if absolute: val %= 360. 
val -=", "if end == -1: secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command)", "Returns rotation in either degrees (0 - 359) or radians (0 - 2*pi)", "port: :return: \"\"\" self.pagi_socket = None self.__ip_address = ip_address self.__port = port self.__timeout", "% self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes the hand, grabbing anything", "cnt = 0 while cnt < paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt", "\"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for", "Make sure that we have an existing socket connection. If we don't, exception", "length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld by just setting", "* 180 / math.pi return rotation def move_paces(self, paces, direction='L'): \"\"\" Attempts to", "is False, then vectors are relative to the direction agent is looking, thus", "get_all_states(self): \"\"\" Returns a list of all states that are currently in PAGIworld.", "name, conditions, actions=None): \"\"\" Sets a reflex in PAGIworld to be carried out", "response[len(code)] == \",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def", "-1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 else: nx =", "__init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param port: :return: \"\"\" self.pagi_socket =", "item and drops into into PAGIworld. 
These items are the ones pre-built into", "ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param port: :return: \"\"\" self.pagi_socket = None", "we just return the first message from the socket, otherwise return the first", "stop in this function if socket doesn't return anything :param code: :type code:", "(1) the stack isn't empty and (2) either code is blank or it", "and (2) either code is blank or it matches something on the message", "* math.pi / 180 if adjusted == 0: return 0, z elif adjusted", "__port: int :type __timeout: float :type __message_fragment: str :type __task_file: str :type message_stack:", ":param paces: :type paces: int :param direction: :type direction: str :return: \"\"\" assert_left_or_right(direction)", "for i in range(0, 31): for j in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i,", "True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"]", "port=42209, timeout=3): \"\"\" Create a socket to the given :param ip: :param port:", "on the message stack :param code: :return: str \"\"\" if len(self.message_stack) > 0:", "\"\"\" Creates a new item in PAGIworld with the specified properties :param name:", "we tell him to rotate 90 degrees, if absolute is True, he'll be", "his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod", "return float(response[1]), float(response[2]) def release(self): \"\"\" Opens the hand, releasing anything it could", "absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the agent some number of", "\"\"\" Sends a vector of force to the hand moving it :param x:", "adjusted * math.pi / 180 if adjusted == 0: return 0, z elif", "\"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: 
disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket", "None self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment = \"\"", "if len(current) > 0: vision.append(current) current = list() current.append(response[j]) vision.append(current) return vision def", "math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax ** 2 + ay ** 2)", "math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted < 180: nx = math.cos(radjusted -", "to process returned vision repsonse. Splits the response into a list of lists", ":param r: :param e: :param k: :param degrees: :return: \"\"\" if degrees: r", "nx, ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny))", "self.hand = hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\" Gets the position of", "loaded in self.load_task. If one wasn't loaded, then a RuntimeError will be raised.", "name, length): \"\"\" Set a state within PAGIworld. 
:param name: :type name: str", "if not absolute: self.pagi_world.send_message(\"%sHvec,%f,%f\" % (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand)", "(defined as one width of his body) to either the left or right.", "of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\"", "absolute is False, then vectors are relative to the direction agent is looking,", "math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 elif adjusted < -180: nx =", "ny = PAGIAgent.__get_relative_vector(x, y, z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\")", "If code is blank, then we just return the first message from the", "message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets", "body) to either the left or right. 
:param paces: :type paces: int :param", "block: bool :return: :raises: socket.timeout \"\"\" if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while", "-1 else: nx = math.cos(radjusted - 270) * z * -1 ny =", "set to False, and there's no response from the socket, after self.__timeout seconds,", "else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)]", "pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address: str :type __port:", "isinstance(pagi_world, PAGIWorld): raise ValueError(\"You must pass in a valid PagiWorld variable to PagiAgent\")", ":type y: float :param absolute: :type absolute: bool :return: \"\"\" x = float(x)", "e: :param k: :param degrees: :return: \"\"\" if degrees: r = r *", "== 0: if x > 0: angle = 270 else: angle = 90", "z, rotation) print(nx, ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y,", "of ?x? 
points which contains all of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\")", "-1: secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command) + 1] if", "* -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if adjusted <", "Create a socket to the given :param ip: :param port: :return: :raises: ConnectionRefusedError", "is for a sensor or action, that it's a valid sensor or action", "< 180: nx = math.cos(radjusted - 90) * z ny = math.sqrt(math.pow(z, 2)", "raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self, hand, pagi_world):", "== 0: if len(current) > 0: vision.append(current) current = list() current.append(response[j]) vision.append(current) return", "= None self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment =", "in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index == -1: break", "to the agent :return: tupe(float, float) of the x, y coordinates of the", "= r * math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m,", "= math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 else: nx = math.cos(radjusted -", "the direction agent is looking, thus +y is always in direction of top", "self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets messages from the socket. If code", "vision[10][15] representing upper right :return: list of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\")", "Rotate the agent some number of degrees/radians. 
If absolute is True, then we", "\"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return reflexes[1:] def drop_item(self, name, x_coord, y_coord, description=None):", "x=0, y=0, absolute=False): \"\"\" Sends a vector force to the agent to move", "== \"R\" else -1 cnt = 0 while cnt < paces: self.send_force(x=(val *", "1 def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a vector force to the", "socket, otherwise return the first matching message with that code, saving all other", "up, -y is world bottom, +x is world right and -x is world", "270) * z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else:", "hand, pagi_world): assert_left_or_right(hand) self.hand = hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\" Gets", "action type, as well verify that if the message is for a sensor", "if not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was not found\" % task_file)", "column_length. :param response: :param column_length: :return: \"\"\" vision = list() current = list()", "val = val * 180. / math.pi if absolute: val %= 360. val", "action to prevent bad calls. :param message: :type message: str :return: :raises: RuntimeError", "0): self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (x, y)) else: rotation = self.get_rotation() if x != 0", "90) * z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif", "\"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper() == \"R\" else -1 cnt =", "(in case we just use connect directly without creating new PAGIWorld instance) :return:", "270 180 :param val: :type val: float :param degrees: :type degrees: bool :param", "x, y, m, ph, r, e, k, degrees=True): \"\"\" Creates a new item", "the agent some number of degrees/radians. If absolute is True, then we rotate", "r * math.pi / 180. 
self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m, ph,", "math.fabs(y) hyp = math.sqrt(ax ** 2 + ay ** 2) angle = math.acos(ay", "self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self,", "\"\"\" Gets the position of the hand relative to the agent :return: tupe(float,", "= math.cos(radjusted * -1 - 90) * z * -1 ny = math.sqrt(math.pow(z,", "== 0: return 0, z elif adjusted == 180 or adjusted == -180:", "* -1 elif adjusted < -270: ny = math.cos(radjusted * -1 - 180)", "that was loaded in self.load_task. If one wasn't loaded, then a RuntimeError will", "state within PAGIworld. :param name: :type name: str :param length: :type length: int", "left or right, and if it isn't, raise exception :param direction: :return: \"\"\"", "from PAGIworld by just setting it's duration to zero (so that can't ever", "self.hand) self.pagi_world.get_message(code=\"%sHR\" % self.hand) def grab(self): \"\"\" Closes the hand, grabbing anything it", "text: :type text: str :return: \"\"\" text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\")", "ay ** 2) angle = math.acos(ay / hyp) z = math.sin(angle) * ay", "additionally save the task file name so we can reset things if necessary", "2)) elif adjusted < 180: nx = math.cos(radjusted - 90) * z ny", "math.pow(ny, 2)) elif adjusted < 180: nx = math.cos(radjusted - 90) * z", "\\n if message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False):", "carried out on conditions. 
:param name: :param conditions: :param actions: :return: \"\"\" if", "pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks that the given direction is", "port=42209, timeout=3): \"\"\" :param ip: :param port: :return: \"\"\" self.pagi_socket = None self.__ip_address", "def send_message(self, message): \"\"\" Send a message to the socket. We make sure", ":param length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def", "self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation in either", "-y is world bottom, +x is world right and -x is world left.", "- 1) % column_length == 0: if len(current) > 0: vision.append(current) current =", "response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\") return float(response[1]), float(response[2]) def release(self): \"\"\" Opens the", "Returns a list of 11 (rows) x 16 (columns) points which contains all", "raised. 
:raises: RuntimeError \"\"\" if self.__task_file == \"\" or self.__task_file is None: raise", "None: raise RuntimeError(\"Cannot reset task, no previous task file found\") self.load_task(self.__task_file) def print_text(self,", "= 0 elif y == 0: if x > 0: angle = 270", "\"\": ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port self.__timeout = timeout", "and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message '%s'\" %", "it matches something on the message stack :param code: :return: str \"\"\" if", "self.__message_fragment.find(\"\\n\") if message_index == -1: break else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:]", "range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in range(0, 16): for j", "in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message '%s'\" % (secondary, message)) elif", "def get_message(self, code=\"\", block=False): \"\"\" Gets messages from the socket. If code is", "release(self): \"\"\" Opens the hand, releasing anything it could be holding :return: \"\"\"", "items are the ones pre-built into PAGIworld. :param name: :param x: :param y:", "is looking, thus +y is always in direction of top of agent, -y", "save the task file name so we can reset things if necessary :param", "PAGIworld to be carried out on conditions. 
:param name: :param conditions: :param actions:", "left at 90 degrees and if absolute is False, he'll be looking to", ":param ip: :param port: :return: \"\"\" self.pagi_socket = None self.__ip_address = ip_address self.__port", "looking down at 180 degrees, and we tell him to rotate 90 degrees,", "command found in the message '%s'\" % message) end = message[len(command)+1:].find(\",\") if end", "/ y) * 180 / math.pi else: angle = math.acos(z / x) *", "at 90 degrees and if absolute is False, he'll be looking to the", "bool :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,A\") response = self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360", "to either the left or right. :param paces: :type paces: int :param direction:", "= math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1 elif adjusted < -270: ny", "response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list of", "self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\" Sets", "function if socket doesn't return anything :param code: :type code: str :param block:", "we don't, exception will be raised. :return: :raises: RuntimeError \"\"\" if self.pagi_socket is", "\"\"\" TODO: Finish and simplify :param x: :param y: :param z: :param rotation:", "rotation = self.get_rotation() if x != 0 and y != 0: ax =", "Python PAGIworld API \"\"\" __author__ = \"<NAME>\" __copyright__ = \"Copyright 2015, RAIR Lab\"", "moving it :param x: :type x: float :param y: :type y: float :param", "degrees: val = val * 180. / math.pi if absolute: val %= 360.", "self.load_task. If one wasn't loaded, then a RuntimeError will be raised. 
:raises: RuntimeError", "359) or radians (0 - 2*pi) of agent (0 is looking upward) :param", "math.pi + 180 else: angle = math.acos(z / x) * 180 / math.pi", "self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index == -1: break else: response = self.__message_fragment[:message_index]", "ny = math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif", "or adjusted == -90: return (-1 * z), 0 else: if adjusted >", "ip_address self.__port = port self.__timeout = timeout self.__message_fragment = \"\" self.__task_file = \"\"", "hand[0].upper() self.pagi_world = pagi_world def get_position(self): \"\"\" Gets the position of the hand", "list of lists where each inner list is the length of specified column_length.", "all messages must end with \\n if message[-1] != \"\\n\": message += \"\\n\"", "x: float :param y: :type y: float :param absolute: :type absolute: bool :return:", "/ hyp) z = math.sin(angle) * ay else: if x != 0: z", "math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m, ph, r, e,", "out on conditions. :param name: :param conditions: :param actions: :return: \"\"\" if actions", "code: :type code: str :param block: :type block: bool :return: :raises: socket.timeout \"\"\"", "states that are currently in PAGIworld. 
:return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\")", "'%s' was not found\" % task_file) self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def", "adjusted < -90: ny = math.cos(radjusted * -1) * z nx = math.sqrt(math.pow(z,", "image_file: :param x: :param y: :param m: :param ph: :param r: :param e:", "90: ny = math.cos(radjusted) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2))", "time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\",", "no response from the socket, after self.__timeout seconds, function will raise socket.timeout exception.", "block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\" Attempts to return a", "\"MPN\"] for i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for i", "vision field with vision[10][15] representing upper right :return: list of size 11 x", "\"\"\" Returns a list of 11 (rows) x 16 (columns) points which contains", "+= 1 def send_force(self, x=0, y=0, absolute=False): \"\"\" Sends a vector force to", "item in PAGIworld with the specified properties :param name: :param image_file: :param x:", "__timeout: float :type __message_fragment: str :type __task_file: str :type message_stack: list \"\"\" def", "ones pre-built into PAGIworld. 
:param name: :param x: :param y: :param n: :return:", "import socket import time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\",", "to the center of his body :return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\"", "\"\"\" Checks that the given direction is either left or right, and if", "a new socket connection\") def send_message(self, message): \"\"\" Send a message to the", "!= \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\", block=False): \"\"\" Gets messages", "end == -1: secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command) +", "return the first matching message with that code, saving all other messages to", "name: :param x: :param y: :param n: :return: \"\"\" if description is None", "position specified from 0 (looking up), otherwise rotate him relative to where he's", "in message '%s'\" % (secondary, message)) elif command == \"addForce\" and secondary not", "is always in direction of top of agent, -y is bottom, +x is", "send_force(self, x, y, absolute=False): \"\"\" Sends a vector of force to the hand", "\"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if", "the active reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\") return", "x) * 180 / math.pi + 270 adjusted = rotation - angle radjusted", "ph: :param r: :param e: :param k: :param degrees: :return: \"\"\" if degrees:", "-180: nx = math.cos(radjusted * -1 - 90) * z * -1 ny", "self.message_stack = list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209,", "= message[len(command)+1:end + len(command) + 1] if command == \"sensorRequest\" and secondary not", "is 
None: raise RuntimeError(\"Cannot reset task, no previous task file found\") self.load_task(self.__task_file) def", "\"\"\" Closes the hand, grabbing anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" %", "ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\",", "code and response[len(code)] == \",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return", "-90: return (-1 * z), 0 else: if adjusted > 0: if adjusted", "if not degrees: val = val * 180. / math.pi if absolute: val", "\"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes class PAGIWorld(object): \"\"\"", "instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that we have an", "ny = math.cos(radjusted * -1 - 180) * z * -1 nx =", "solid, otherwise he'll do nothing. :return: bool True if agent has jumped (his", "if block: self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(self.__timeout) return response def __get_message_from_stack(self, code): \"\"\" Attempts to return", "Attempts to move the agent some number of paces (defined as one width", "11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self):", ":param actions: :return: \"\"\" if actions is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions,", "on conditions. 
:param name: :param conditions: :param actions: :return: \"\"\" if actions is", "'LEFT': raise ValueError(\"You can only use a L or R value for hands\")", "coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self):", "% (self.hand, x, y)) else: pass self.pagi_world.get_message(code=\"%sHvec\" % self.hand) def assert_left_or_right(direction): \"\"\" Checks", "16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response = self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns", "== -1: secondary = message[len(command)+1:] else: secondary = message[len(command)+1:end + len(command) + 1]", "0 90 agent 270 180 :param val: :type val: float :param degrees: :type", "if it isn't, raise exception :param direction: :return: \"\"\" if not direction.upper() ==", "If absolute is False, then vectors are relative to the direction agent is", "int :param direction: :type direction: str :return: \"\"\" assert_left_or_right(direction) val = 1 if", "of his periphal vision. 
vision[0][0] represents lower left of the vision field with", "val: float :param degrees: :type degrees: bool :param absolute: :type absolute: bool :return:", "elif x < 0: if y > 0: angle = math.acos(z / y)", "-1: break else: response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\"", "= float(y) if not absolute or (x == 0 and y == 0):", "a list of all the active reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\")", "\"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS = [\"S\", \"BP\", \"LP\", \"RP\", \"A\", \"MDN\", \"MPN\"] for i", "angle = math.acos(z / y) * 180 / math.pi else: angle = math.acos(z", "variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r',", "== -270: return z, 0 elif adjusted == 270 or adjusted == -90:", "can reset things if necessary :param task_file: :type task_file: str :raises: FileNotFoundError \"\"\"", "bool :return: \"\"\" x = float(x) y = float(y) if not absolute or", ":param x: :param y: :param n: :return: \"\"\" if description is None or", "socket to the given :param ip: :param port: :return: :raises: ConnectionRefusedError \"\"\" if", "(response[:len(code)] == code and response[len(code)] == \",\"): break else: self.message_stack.append(response) if block: self.pagi_socket.setblocking(False)", "\"\"\" if degrees: r = r * math.pi / 180. 
self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name,", "float :param absolute: :type absolute: bool :return: \"\"\" x = float(x) y =", "for i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for i in", "\"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name, conditions, actions=None): \"\"\"", "case we just use connect directly without creating new PAGIWorld instance) :return: \"\"\"", "specified column_length. :param response: :param column_length: :return: \"\"\" vision = list() current =", "not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was not found\" % task_file) self.__task_file", "that are currently in PAGIworld. :return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return", "which contains all of his detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\")", "while True and response != \"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment +=", "= pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def jump(self): \"\"\"", ":type absolute: bool :return: \"\"\" x = float(x) y = float(y) if not", "detailed vision :return: \"\"\" self.pagi_world.send_message(\"sensorRequest,MDN\") response = self.pagi_world.get_message(code=\"MDN\").split(\",\") return self.__process_vision(response, 21) @staticmethod def", "def print_text(self, text): \"\"\" Print text to the PAGIworld console window. :param text:", "within PAGIworld. 
:param name: :type name: str :param length: :type length: int :return:", "__credits__ = [\"<NAME>\"] __license__ = \"MIT\" import math import os import socket import", "2) - math.pow(nx, 2)) return nx, ny def get_position(self): \"\"\" Gets x/y coordinates", "there's no response from the socket, after self.__timeout seconds, function will raise socket.timeout", "self.__task_file = task_file self.send_message(\"loadTask,%s\" % task_file) def reset_task(self): \"\"\" Resets the task to", "< 0: angle = 180 else: angle = 0 elif y == 0:", "j)) for i in range(0, 16): for j in range(0, 11): VALID_SENSORS.append(\"P%d.%d\" %", "reset things if necessary :param task_file: :type task_file: str :raises: FileNotFoundError \"\"\" if", "ERROR_CHECK: command = message[:message.find(\",\")] if command == \"\" or command not in VALID_COMMANDS:", ":param code: :type code: str :param block: :type block: bool :return: :raises: socket.timeout", "y > 0: angle = math.acos(z / y) * 180 / math.pi else:", "= [\"<NAME>\"] __license__ = \"MIT\" import math import os import socket import time", "0 while cnt < paces: self.send_force(x=(val * 1000), absolute=True) time.sleep(2) cnt += 1", "touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response = self.pagi_world.get_message(code=\"J\").split(\",\") return int(response[1]) ==", "true, then vector +y is world up, -y is world bottom, +x is", "return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a task in PAGIworld. 
We additionally", "self.pagi_socket = None self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment", "\"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\" Make sure that we have an existing socket", "valid PagiWorld variable to PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand", "import time ERROR_CHECK = True VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\",", "180 or adjusted == -180: return 0, (-1 * z) elif adjusted ==", "(name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states from PAGIworld by just", "He will only be able to if his bottom edge is touching something", "of all the active reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes =", "of agent, -y is bottom, +x is towards his right side, -x is", "def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param port: :return: \"\"\" self.pagi_socket", "x, y, m, ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent :type", "self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.connect(ip_address, port, timeout) self.agent", "get_rotation(self, degrees=True): \"\"\" Returns rotation in either degrees (0 - 359) or radians", "self.pagi_world.get_message(code=\"MPN\").split(\",\") return self.__process_vision(response, 16) def get_detailed_vision(self): \"\"\" Returns a list of ?x? 
points", "self.__assert_open_socket() if ERROR_CHECK: command = message[:message.find(\",\")] if command == \"\" or command not", "PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3): \"\"\" Create a socket to the given", "y != 0: ax = math.fabs(x) ay = math.fabs(y) hyp = math.sqrt(ax **", "/ x) * 180 / math.pi + 270 adjusted = rotation - angle", "the vision field with vision[10][15] representing upper right :return: list of size 11", "disable=too-many-instance-attributes class PAGIWorld(object): \"\"\" :type pagi_socket: socket.socket :type __ip_address: str :type __port: int", "other messages to a stack. If block is set to False, and there's", "elif adjusted < -180: nx = math.cos(radjusted * -1 - 90) * z", "\"\"\" Removes a reflex completely from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" %", "* z), 0 else: if adjusted > 0: if adjusted < 90: ny", ":return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\" \"Removes\" states", "command == \"addForce\" and secondary not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in", "or action, that it's a valid sensor or action to prevent bad calls.", "* z ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) return nx, ny def", ":return: \"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self,", "PAGIworld console window. 
:param text: :type text: str :return: \"\"\" text = str(text)", "with \\n if message[-1] != \"\\n\": message += \"\\n\" self.pagi_socket.send(message.encode()) def get_message(self, code=\"\",", "% (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish", "ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\" TODO: Finish and simplify", "-1) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 elif", "not in VALID_SENSORS: raise RuntimeError(\"Invalid sensor '%s' in message '%s'\" % (secondary, message))", "self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set a state within PAGIworld. :param name:", "else: angle = 0 elif y == 0: if x > 0: angle", "paces, direction='L'): \"\"\" Attempts to move the agent some number of paces (defined", "task_file: :type task_file: str :raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file", "his right side, -x is his left side. If absolute is true, then", "self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def get_periphal_vision(self): \"\"\" Returns a list of 11 (rows)", "else: angle = math.acos(z / x) * 180 / math.pi + 270 adjusted", "towards his right side, -x is his left side. 
If absolute is true,", "VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for i in range(0, 31): for j", "= message[:message.find(\",\")] if command == \"\" or command not in VALID_COMMANDS: raise RuntimeError(\"Invalid", "y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name, image_file, x, y, m,", "disconnect(self): \"\"\" Close the socket to PAGIWorld and then reset internal variables (in", "image_file, x, y, m, ph, r, e, k)) self.get_message(code=\"createItem\") class PAGIAgent(object): \"\"\" PAGIAgent", "then we rotate to position specified from 0 (looking up), otherwise rotate him", "left or right. :param paces: :type paces: int :param direction: :type direction: str", "the hand, grabbing anything it is touching :return: \"\"\" self.pagi_world.send_message(\"%sHG\" % self.hand) self.pagi_world.get_message(code=\"%sHG\"", "-= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val) self.pagi_world.get_message(code=\"BR\") def get_rotation(self, degrees=True): \"\"\" Returns rotation in", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket", "self.__get_message_from_stack(code) while True and response != \"\": while \"\\n\" not in self.__message_fragment: self.__message_fragment", "response = self.__message_fragment[:message_index] self.__message_fragment = self.__message_fragment[message_index+1:] if code == \"\" or (response[:len(code)] ==", "\"\" self.message_stack = list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\",", "ay = math.fabs(y) hyp = math.sqrt(ax ** 2 + ay ** 2) angle", "y_coord)) else: self.send_message(\"dropItem,%s,%f,%f,%s\" % (name, x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint: 
disable=too-many-arguments def", "range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return", "z, rotation): \"\"\" TODO: Finish and simplify :param x: :param y: :param z:", "timeout=3): \"\"\" Create a socket to the given :param ip: :param port: :return:", "connection. If we don't, exception will be raised. :return: :raises: RuntimeError \"\"\" if", "str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\" Set a state", "else: self.send_message(\"setReflex,%s,%s\" % (name, conditions)) self.get_message(code=\"setReflex\") def remove_reflex(self, name): \"\"\" Removes a reflex", "not in VALID_FORCES: raise RuntimeError(\"Invalid force '%s' in message '%s'\" % (secondary, message))", "a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\"", "side. If absolute is true, then vector +y is world up, -y is", "__process_vision(response, column_length): \"\"\" Internal method to process returned vision repsonse. 
Splits the response", "= self.__get_message_from_stack(code) while True and response != \"\": while \"\\n\" not in self.__message_fragment:", "\"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's rotation back to 0 degrees", "length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self,", "or adjusted == -270: return z, 0 elif adjusted == 270 or adjusted", "i in range(5): VALID_SENSORS.append(\"L%d\" % i) VALID_SENSORS.append(\"R%d\" % i) for i in range(0,", "x > 0: angle = 270 else: angle = 90 elif x <", "he'll be looking to the left at 90 degrees and if absolute is", "\"\"\" Resets the agent's rotation back to 0 degrees (looking upward) :return: \"\"\"", "all the active reflexes in PAGIworld :return: list \"\"\" self.send_message(\"getActiveReflexes\") reflexes = self.get_message(code=\"activeReflexes\").split(\",\")", "back to 0 degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val,", "response into a list of lists where each inner list is the length", ":type val: float :param degrees: :type degrees: bool :param absolute: :type absolute: bool", "port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self): \"\"\" Close the socket to PAGIWorld and then", "def get_all_states(self): \"\"\" Returns a list of all states that are currently in", "we have an existing socket connection. 
If we don't, exception will be raised.", "code: :return: str \"\"\" if len(self.message_stack) > 0: if code != \"\": for", "valid action type, as well verify that if the message is for a", "self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False) self.pagi_socket.settimeout(timeout) def disconnect(self):", "will only be able to if his bottom edge is touching something solid,", "radjusted = adjusted * math.pi / 180 if adjusted == 0: return 0,", "1 def reset_agent(self): \"\"\" Resets agent state back to a starting position (looking", "reflex in PAGIworld to be carried out on conditions. :param name: :param conditions:", "column_length == 0: if len(current) > 0: vision.append(current) current = list() current.append(response[j]) vision.append(current)", "states from PAGIworld by just setting it's duration to zero (so that can't", "\"\"\" self.send_message(\"setState,%s,0\" % name) self.get_message(code=\"setState\") def get_all_states(self): \"\"\" Returns a list of all", "degrees, and we tell him to rotate 90 degrees, if absolute is True,", "as one width of his body) to either the left or right. :param", "jump. He will only be able to if his bottom edge is touching", "if agent has jumped (his bottom is touching something solid) otherwise False \"\"\"", "-1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) else: if adjusted < -90:", "x != 0: z = math.fabs(x) else: z = math.fabs(y) nx, ny =", "z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) elif adjusted < 180: nx", "Opens the hand, releasing anything it could be holding :return: \"\"\" self.pagi_world.send_message(\"%sHR\" %", "0 elif adjusted == 270 or adjusted == -90: return (-1 * z),", "of all states that are currently in PAGIworld. 
:return: list \"\"\" self.send_message(\"getActiveStates\") states", "vision.append(current) return vision def center_hands(self): \"\"\" Moves both of the agent's hands to", "the hand moving it :param x: :type x: float :param y: :type y:", "message)) # all messages must end with \\n if message[-1] != \"\\n\": message", "Returns a list of all the active reflexes in PAGIworld :return: list \"\"\"", "that if the message is for a sensor or action, that it's a", ":param y: :type y: float :param absolute: :type absolute: bool :return: \"\"\" if", "int(response[1]) == 1 def reset_agent(self): \"\"\" Resets agent state back to a starting", "is not None: self.send_message(\"setReflex,%s,%s,%s\" % (name, conditions, actions)) else: self.send_message(\"setReflex,%s,%s\" % (name, conditions))", "assert_left_or_right(direction): \"\"\" Checks that the given direction is either left or right, and", "\"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s' was not found\" %", "coordinates of the hand \"\"\" self.pagi_world.send_message(\"sensorRequest,%sP\" % self.hand) response = self.pagi_world.get_message(code=(\"%sP\" % self.hand)).split(\",\")", "11): VALID_SENSORS.append(\"P%d.%d\" % (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\",", "exception will be thrown, but program will stop in this function if socket", "stack :param code: :return: str \"\"\" if len(self.message_stack) > 0: if code !=", "or right. 
:param paces: :type paces: int :param direction: :type direction: str :return:", "x_coord, y_coord, description)) self.get_message(code=\"dropItem\") # pylint: disable=too-many-arguments def create_item(self, name, image_file, x, y,", "= list() current = list() for j in range(1, len(response)): if (j -", "float(response[-1]) rotation %= 360 if degrees: rotation = rotation * 180 / math.pi", "\"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\", \"LHR\"] # pylint: disable=too-many-instance-attributes", "the agent to try and jump. He will only be able to if", "RuntimeError(\"Invalid sensor '%s' in message '%s'\" % (secondary, message)) elif command == \"addForce\"", "absolute=False): \"\"\" Sends a vector force to the agent to move his body.", "to False, and there's no response from the socket, after self.__timeout seconds, function", "Use connect() to open a new socket connection\") def send_message(self, message): \"\"\" Send", "/ math.pi if absolute: val %= 360. val -= self.get_rotation() self.pagi_world.send_message(\"addForce,BR,%f\" % val)", "\"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\", \"BMV\", \"J\", \"BR\", \"RHG\", \"LHG\", \"RHR\",", "current = list() for j in range(1, len(response)): if (j - 1) %", "he'll do nothing. 
:return: bool True if agent has jumped (his bottom is", "connect directly without creating new PAGIWorld instance) :return: \"\"\" self.pagi_socket.close() def __assert_open_socket(self): \"\"\"", "\"\"\" def __init__(self, ip_address=\"\", port=42209, timeout=3): \"\"\" :param ip: :param port: :return: \"\"\"", "completely from PAGIworld :param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self):", "% (i, j)) VALID_FORCES = [\"RHvec\", \"LHvec\", \"BMvec\", \"RHH\", \"LHH\", \"RHV\", \"LHV\", \"BMH\",", "to position specified from 0 (looking up), otherwise rotate him relative to where", "return 0, z elif adjusted == 180 or adjusted == -180: return 0,", "if block: self.pagi_socket.setblocking(True) response = self.__get_message_from_stack(code) while True and response != \"\": while", "otherwise rotate him relative to where he's looking. Therefore, if he's looking down", "or it matches something on the message stack :param code: :return: str \"\"\"", "ay else: if x != 0: z = math.fabs(x) else: z = math.fabs(y)", "= math.cos(radjusted * -1) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2))", "* -1 elif adjusted < 270: ny = math.cos(radjusted - 180) * z", "things if necessary :param task_file: :type task_file: str :raises: FileNotFoundError \"\"\" if not", "if ERROR_CHECK: command = message[:message.find(\",\")] if command == \"\" or command not in", ":param direction: :type direction: str :return: \"\"\" assert_left_or_right(direction) val = 1 if direction[0].upper()", "\"\"\" Gets x/y coordinates of the agent in the world :return: tuple(float, float)", ":param name: :return: \"\"\" self.send_message(\"removeReflex,%s\" % name) self.get_message(code=\"removeReflex\") def get_all_reflexes(self): \"\"\" Returns a", "y, absolute=False): \"\"\" Sends a vector of force to the hand moving it", "column_length): \"\"\" Internal method to process returned vision repsonse. 
Splits the response into", "message from the socket, otherwise return the first matching message with that code,", "self.__timeout = timeout self.__message_fragment = \"\" self.__task_file = \"\" self.message_stack = list() self.connect(ip_address,", "bad calls. :param message: :type message: str :return: :raises: RuntimeError \"\"\" self.__assert_open_socket() if", "PagiAgent\") self.pagi_world = pagi_world self.left_hand = PAGIAgentHand('l', pagi_world) self.right_hand = PAGIAgentHand('r', pagi_world) def", "\"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\" Rotate the agent some", "of coordinates of agent \"\"\" self.pagi_world.send_message(\"sensorRequest,BP\") response = self.pagi_world.get_message(code=\"BP\").split(\",\") return float(response[1]), float(response[2]) def", "-90: ny = math.cos(radjusted * -1) * z nx = math.sqrt(math.pow(z, 2) -", "are the ones pre-built into PAGIworld. :param name: :param x: :param y: :param", "ip_address = socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment", "relative to the direction agent is looking, thus +y is always in direction", "self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\" % (name, image_file, x, y, m, ph, r, e, k)) self.get_message(code=\"createItem\") class", "z, 0 elif adjusted == 270 or adjusted == -90: return (-1 *", "then vectors are relative to the direction agent is looking, thus +y is", "elif adjusted < 270: ny = math.cos(radjusted - 180) * z * -1", "raise RuntimeError(\"Task file at '%s' was not found\" % task_file) self.__task_file = task_file", "-y is bottom, +x is towards his right side, -x is his left", "of agent (0 is looking upward) :param degrees: :type degrees: bool :return: \"\"\"", "[\"<NAME>\"] __license__ = \"MIT\" import math import os import socket import time ERROR_CHECK", "z * -1 nx = math.sqrt(math.pow(z, 2) - math.pow(ny, 2)) * -1 else:", 
"(x, y)) else: rotation = self.get_rotation() if x != 0 and y !=", "direction: :return: \"\"\" if not direction.upper() == 'R' and not direction.upper() == 'L'", "PAGIworld. We additionally save the task file name so we can reset things", "direction agent is looking, thus +y is always in direction of top of", "degrees (looking upward) :return: \"\"\" self.rotate(0, absolute=True) def rotate(self, val, degrees=True, absolute=False): \"\"\"", "absolute=False): \"\"\" Rotate the agent some number of degrees/radians. If absolute is True,", "> 0: angle = 270 else: angle = 90 elif x < 0:", "\"\"\" raise NotImplementedError class PAGIAgentHand(object): \"\"\" :type pagi_world: PAGIWorld \"\"\" def __init__(self, hand,", "is true, then vector +y is world up, -y is world bottom, +x", "self.__task_file = \"\" self.message_stack = list() self.pagi_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.pagi_socket.connect((ip_address, port)) self.pagi_socket.setblocking(False)", "a starting position (looking upward with hands in starting position) :return: \"\"\" self.reset_rotation()", "repsonse. Splits the response into a list of lists where each inner list", "do nothing. :return: bool True if agent has jumped (his bottom is touching", "return self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def load_task(self, task_file): \"\"\" Loads a", "in range(0, 21): VALID_SENSORS.append(\"V%d.%d\" % (i, j)) for i in range(0, 16): for", "\"\\n\" not in self.__message_fragment: self.__message_fragment += self.pagi_socket.recv(4096).decode() message_index = self.__message_fragment.find(\"\\n\") if message_index ==", "either the left or right. 
:param paces: :type paces: int :param direction: :type", "= socket.gethostbyname(socket.gethostname()) self.__ip_address = ip_address self.__port = port self.__timeout = timeout self.__message_fragment =", "length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" % (name, length)) self.get_message(code=\"setState\") def remove_state(self, name): \"\"\"", "then vector +y is world up, -y is world bottom, +x is world", "degrees: :return: \"\"\" if degrees: r = r * math.pi / 180. self.send_message(\"createItem,%s,%s,%f,%f,%f,%d,%f,%f,%d\"", "degrees=True): \"\"\" Returns rotation in either degrees (0 - 359) or radians (0", "world up, -y is world bottom, +x is world right and -x is", "180 if adjusted == 0: return 0, z elif adjusted == 180 or", "just setting it's duration to zero (so that can't ever really be in", "= math.cos(radjusted - 180) * z * -1 nx = math.sqrt(math.pow(z, 2) -", "90 degrees, if absolute is True, he'll be looking to the left at", "adjusted < 270: ny = math.cos(radjusted - 180) * z * -1 nx", "that can't ever really be in a state) :param name: :return: \"\"\" self.send_message(\"setState,%s,0\"", "0: if x > 0: angle = 270 else: angle = 90 elif", "actions=None): \"\"\" Sets a reflex in PAGIworld to be carried out on conditions.", "adjusted == 180 or adjusted == -180: return 0, (-1 * z) elif", "thrown, but program will stop in this function if socket doesn't return anything", "self.__task_file = \"\" self.message_stack = list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def", "VALID_COMMANDS = [\"sensorRequest\", \"addForce\", \"loadTask\", \"print\", \"findObj\", \"setState\", \"getActiveStates\", \"setReflex\", \"removeReflex\", \"getActiveReflexes\"] VALID_SENSORS", "= list() self.connect(ip_address, port, timeout) self.agent = PAGIAgent(self) def connect(self, ip_address=\"\", port=42209, timeout=3):", "= self.pagi_world.get_message(code=\"A\").split(\",\") rotation = float(response[-1]) rotation %= 360 if degrees: 
rotation = rotation", "% self.hand) self.pagi_world.get_message(code=\"%sHG\" % self.hand) def send_force(self, x, y, absolute=False): \"\"\" Sends a", "\"\"\" text = str(text) self.send_message(\"print,%s\" % text) self.get_message(code=\"print\") def set_state(self, name, length): \"\"\"", "open socket. Use connect() to open a new socket connection\") def send_message(self, message):", "upper right :return: list of size 11 x 16 \"\"\" self.pagi_world.send_message(\"sensorRequest,MPN\") response =", "for index in range(len(self.message_stack)): if self.message_stack[index][:len(code)] == code and \\ self.message_stack[index][len(code)] == \",\":", "180 / math.pi + 180 else: angle = math.acos(z / x) * 180", "* z * -1 ny = math.sqrt(math.pow(z, 2) - math.pow(nx, 2)) * -1", "self.__task_file == \"\" or self.__task_file is None: raise RuntimeError(\"Cannot reset task, no previous", "the given direction is either left or right, and if it isn't, raise", "% task_file) def reset_task(self): \"\"\" Resets the task to the one that was", "len(current) > 0: vision.append(current) current = list() current.append(response[j]) vision.append(current) return vision def center_hands(self):", "> 0: angle = math.acos(z / y) * 180 / math.pi else: angle", "PAGIworld. :return: list \"\"\" self.send_message(\"getActiveStates\") states = self.get_message(code=\"activeStates\").split(\",\") return states[1:] def set_reflex(self, name,", "m, ph, r, e, k, degrees=True): \"\"\" Creates a new item in PAGIworld", "messages to a stack. If block is set to False, and there's no", "2)) * -1 else: nx = math.cos(radjusted - 270) * z * -1", "ny) self.pagi_world.send_message(\"addForce,BMvec,%f,%f\" % (nx, ny)) self.pagi_world.get_message(code=\"BMvec\") @staticmethod def __get_relative_vector(x, y, z, rotation): \"\"\"", "name: :type name: str :param length: :type length: int :return: \"\"\" self.send_message(\"setState,%s,%d\" %", "a list of all states that are currently in PAGIworld. 
:return: list \"\"\"", "2) - math.pow(nx, 2)) * -1 elif adjusted < -270: ny = math.cos(radjusted", "\\ self.message_stack[index][len(code)] == \",\": return self.message_stack.pop(0) return None else: return self.message_stack.pop(0) def load_task(self,", "ny = math.cos(radjusted * -1) * z nx = math.sqrt(math.pow(z, 2) - math.pow(ny,", "in starting position) :return: \"\"\" self.reset_rotation() def reset_rotation(self): \"\"\" Resets the agent's rotation", "has jumped (his bottom is touching something solid) otherwise False \"\"\" self.pagi_world.send_message(\"addForce,J,1000\") response", "otherwise he'll do nothing. :return: bool True if agent has jumped (his bottom", "move_paces(self, paces, direction='L'): \"\"\" Attempts to move the agent some number of paces", "z), 0 else: if adjusted > 0: if adjusted < 90: ny =", "if absolute is False, he'll be looking to the right at 270 degrees", "* 180 / math.pi + 270 adjusted = rotation - angle radjusted =", "task_file: str :raises: FileNotFoundError \"\"\" if not os.path.isfile(task_file): raise RuntimeError(\"Task file at '%s'", "reflexes[1:] def drop_item(self, name, x_coord, y_coord, description=None): \"\"\" Creates an item and drops" ]
[ "--------------------\") print(\"a - Apply the Custom Harris Corner Detector to an image\") print(\"b", "Gaussian Filter Sigma Effect\") print(\"g - Use Bilateral Filter\") print(\"h - Visualize Bilateral", "3x3 Kernel and Apply Threshold\") print(\"e - Use Prewitt Filter\") print(\"f - Use", "if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER", "MAIN MENU ----------------\") print(\"1 - Noise Removal\") print(\"2 - Edge Extraction\") print(\"3 -", "print(\"i - Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt", "- Use Canny Edge Detector\") print(\"h - Generate an Animation of Canny\") print(\"i", "1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt == \"0\":", "== \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else: print(\"Invalid", "+ \" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt =", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif", "= str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt ==", "Harris\") print(\"c - Visualize the Effect of Sobel Kernel Aperture on Harris\") print(\"d", "input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif", "1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt == \"c\":", "Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt", "import subprocess import sys _PYTHON_INTERPRETER = 
sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\"", "print(\"3 - Corner Detection\") print(\"4 - Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt", "\"\" while _opt != '0': print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise Removal\")", "print(\"-------------------- Custom Harris --------------------\") print(\"a - Apply the Custom Harris Corner Detector to", "== \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \"", "\" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt ==", "== \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass else: print(\"Invalid", "_opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else:", "Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if", "print(\"b - Use Mean Filter\") print(\"c - Visualize Mean Filter Anchor Effect\") print(\"d", "on Edge Detection\") print(\"b - Use Sobel Operator\") print(\"c - Use Scharr 3x3", "_opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER +", "\"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize Data Type Problems on Edge", "print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt ==", "_opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER +", "Data Type Problems on Edge Detection\") print(\"b - Use Sobel Operator\") print(\"c -", "while _opt != \"0\": print(\"---------------- Noise Remove 
----------------\") print(\"a - Plot Original Pictures\")", "print(\"c - Visualize Mean Filter Anchor Effect\") print(\"d - Use Median Filter\") print(\"e", "- Use Median Filter\") print(\"e - Use Gaussian Filter\") print(\"f - Visualize Gaussian", "_opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER +", "\" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt ==", "Apply the Custom Harris Corner Detector to an image\") print(\"b - Compare the", "Use Scharr 3x3 Kernel and Apply Threshold\") print(\"e - Use Prewitt Filter\") print(\"f", "of Block Size on Harris\") print(\"c - Visualize the Effect of Sobel Kernel", "_opt == \"2\": while _opt != \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a -", "the Effect of Block Size on Harris\") print(\"c - Visualize the Effect of", "+ \" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt", "Gaussian Filter\") print(\"f - Visualize Gaussian Filter Sigma Effect\") print(\"g - Use Bilateral", "- Use Roberts Filter\") print(\"g - Use Canny Edge Detector\") print(\"h - Generate", "_opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER +", "while _opt != \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a - Apply the Harris", "print(\"e - Use Gaussian Filter\") print(\"f - Visualize Gaussian Filter Sigma Effect\") print(\"g", "= input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\")", "\"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\")", "3x3 Kernel\") print(\"d - Use Scharr 3x3 
Kernel and Apply Threshold\") print(\"e -", "+ \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt", "== \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \"", "the Harris to an image\") print(\"b - Visualize the Effect of Block Size", "- Apply the Custom Harris Corner Detector to an image\") print(\"b - Compare", "print(\"Invalid Option!\") _opt = \"\" elif _opt == \"0\": pass else: print(\"Invalid Option!\")", "<filename>code_python/Easy_Run.py import subprocess import sys _PYTHON_INTERPRETER = sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt =", "_opt == \"1\": while _opt != \"0\": print(\"---------------- Noise Remove ----------------\") print(\"a -", "\" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt ==", "\" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt ==", "== \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \"", "\"1\": while _opt != \"0\": print(\"---------------- Noise Remove ----------------\") print(\"a - Plot Original", "Operator\") print(\"c - Use Scharr 3x3 Kernel\") print(\"d - Use Scharr 3x3 Kernel", "Filter Anchor Effect\") print(\"d - Use Median Filter\") print(\"e - Use Gaussian Filter\")", "Scharr 3x3 Kernel and Apply Threshold\") print(\"e - Use Prewitt Filter\") print(\"f -", "- Visualize Data Type Problems on Edge Detection\") print(\"b - Use Sobel Operator\")", "sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while _opt != '0': print(\"---------------- MAIN", "else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"3\": while _opt !=", "else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"2\": while _opt !=", 
"Detection\") print(\"b - Use Sobel Operator\") print(\"c - Use Scharr 3x3 Kernel\") print(\"d", "image\") print(\"b - Visualize the Effect of Block Size on Harris\") print(\"c -", "_opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER +", "\" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt ==", "= \"\" while _opt != '0': print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif", "_opt = \"\" elif _opt == \"3\": while _opt != \"0\": print(\"---------------- Corner", "== \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \"", "Median Filter\") print(\"e - Use Gaussian Filter\") print(\"f - Visualize Gaussian Filter Sigma", "\"2\": while _opt != \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize Data", "my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt == \"0\":", "- Plot Original Pictures\") print(\"b - Use Mean Filter\") print(\"c - Visualize Mean", "_opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER +", "- Use Prewitt Filter\") print(\"f - Use Roberts Filter\") print(\"g - Use Canny", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif", "Harris\") print(\"e - 
Generate an Animation Sweeping the Harris Parameters\") print(\"0 - Exit\")", "'0': print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise Removal\") print(\"2 - Edge Extraction\")", "print(\"b - Compare the OpenCV and Custom Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\")", "if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER", "Canny Edge Detector\") print(\"h - Generate an Animation of Canny\") print(\"i - Use", "Edge Detection\") print(\"b - Use Sobel Operator\") print(\"c - Use Scharr 3x3 Kernel\")", "Pictures\") print(\"b - Use Mean Filter\") print(\"c - Visualize Mean Filter Anchor Effect\")", "----------------\") print(\"a - Apply the Harris to an image\") print(\"b - Visualize the", "\"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\")", "\"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\")", "on Harris\") print(\"d - Visualize the Effect of Harris Free Parameter on Harris\")", "== \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \"", "- Visualize the Effect of Block Size on Harris\") print(\"c - Visualize the", "Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if", "print(\"a - Apply the Harris to an image\") print(\"b - Visualize the Effect", "_opt = \"\" elif _opt == \"4\": while _opt != \"0\": print(\"-------------------- Custom", "_opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER +", "_opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 
1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER +", "_opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt", "\"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\")", "_opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER +", "Harris to an image\") print(\"b - Visualize the Effect of Block Size on", "Noise Removal\") print(\"2 - Edge Extraction\") print(\"3 - Corner Detection\") print(\"4 - Custom", "if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER", "_opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER +", "elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER", "+ \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt =", "1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\":", "_opt != \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a - Apply the Harris to", "\"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\")", "Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if", "- Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\":", "print(\"0 - Exit\") 
print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt ==", "_opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt", "Corner Detection\") print(\"4 - Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\")", "import sys _PYTHON_INTERPRETER = sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while _opt", "\"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\")", "+ \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt", "the OpenCV and Custom Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\")", "and Custom Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt =", "= input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\")", "\"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\")", "- Apply the Harris to an image\") print(\"b - Visualize the Effect of", "elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER", "== \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"3\":", "subprocess import sys _PYTHON_INTERPRETER = sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while", "if _opt == \"1\": while _opt != \"0\": print(\"---------------- Noise Remove ----------------\") print(\"a", "1.3/harris-detector-ksize.py\") elif _opt == \"d\": 
subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt == \"e\":", "Edge Extraction ----------------\") print(\"a - Visualize Data Type Problems on Edge Detection\") print(\"b", "Scharr 3x3 Kernel\") print(\"d - Use Scharr 3x3 Kernel and Apply Threshold\") print(\"e", "== \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \"", "Visualize Gaussian Filter Sigma Effect\") print(\"g - Use Bilateral Filter\") print(\"h - Visualize", "- Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\":", "print(\"h - Generate an Animation of Canny\") print(\"i - Use Laplacian Filter\") print(\"0", "\"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\")", "input(\"Option:\\t\") _opt = str(_opt) if _opt == \"1\": while _opt != \"0\": print(\"----------------", "input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif", "----------------\") print(\"a - Plot Original Pictures\") print(\"b - Use Mean Filter\") print(\"c -", "----------------\") print(\"1 - Noise Removal\") print(\"2 - Edge Extraction\") print(\"3 - Corner Detection\")", "\" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\"", "Use Mean Filter\") print(\"c - Visualize Mean Filter Anchor Effect\") print(\"d - Use", "_opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"1\": while _opt !=", "_opt != \"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a - Apply the Custom Harris", "subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif", "_opt = str(_opt) if 
_opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt", "_opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt", "1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt == \"e\":", "an Animation of Canny\") print(\"i - Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\")", "_opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER +", "_opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else:", "\"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\")", "+ \" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt", "\"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"0\": pass", "elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER", "Use Canny Edge Detector\") print(\"h - Generate an Animation of Canny\") print(\"i -", "- Compare the OpenCV and Custom Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt", "== \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \"", "Edge Detector\") print(\"h - Generate an Animation of Canny\") print(\"i - Use Laplacian", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif", "\" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 
my_Harris/my-harris-compare.py\") elif _opt ==", "- Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt =", "\"\" elif _opt == \"3\": while _opt != \"0\": print(\"---------------- Corner Detection ----------------\")", "print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt ==", "\"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\")", "\"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\")", "1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt == \"0\":", "Filter\") print(\"e - Use Gaussian Filter\") print(\"f - Visualize Gaussian Filter Sigma Effect\")", "!= \"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a - Apply the Custom Harris Corner", "elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass", "Detection\") print(\"4 - Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt", "Threshold\") print(\"e - Use Prewitt Filter\") print(\"f - Use Roberts Filter\") print(\"g -", "print(\"c - Visualize the Effect of Sobel Kernel Aperture on Harris\") print(\"d -", "print(\"Invalid Option!\") _opt = \"\" elif _opt == \"3\": while _opt != \"0\":", "print(\"h - Visualize Bilateral Filter Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt =", "elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER", 
"\"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\")", "Detection ----------------\") print(\"a - Apply the Harris to an image\") print(\"b - Visualize", "Plot Original Pictures\") print(\"b - Use Mean Filter\") print(\"c - Visualize Mean Filter", "1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\":", "\"\" elif _opt == \"2\": while _opt != \"0\": print(\"---------------- Edge Extraction ----------------\")", "1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif", "str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt == \"b\":", "Sobel Operator\") print(\"c - Use Scharr 3x3 Kernel\") print(\"d - Use Scharr 3x3", "OpenCV and Custom Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt", "= str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt ==", "== \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else: print(\"Invalid", "- Visualize the Effect of Sobel Kernel Aperture on Harris\") print(\"d - Visualize", "elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER", "- Use Sobel Operator\") print(\"c - Use Scharr 3x3 Kernel\") print(\"d - Use", "+ \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt", "on Harris\") print(\"e - Generate an Animation Sweeping the Harris Parameters\") print(\"0 -", "_opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER 
+", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif", "- Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"1\":", "_opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER +", "the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt)", "\" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt ==", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif", "\" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt ==", "- Generate an Animation of Canny\") print(\"i - Use Laplacian Filter\") print(\"0 -", "\"\" elif _opt == \"4\": while _opt != \"0\": print(\"-------------------- Custom Harris --------------------\")", "elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif", "elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER", "- Visualize Bilateral Filter Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\")", "else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"4\": while _opt !=", "\"4\": while _opt != \"0\": print(\"-------------------- Custom Harris 
--------------------\") print(\"a - Apply the", "_opt = str(_opt) if _opt == \"1\": while _opt != \"0\": print(\"---------------- Noise", "= input(\"Option:\\t\") _opt = str(_opt) if _opt == \"1\": while _opt != \"0\":", "Generate an Animation of Canny\") print(\"i - Use Laplacian Filter\") print(\"0 - Exit\")", "1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif", "print(\"2 - Edge Extraction\") print(\"3 - Corner Detection\") print(\"4 - Custom Harris\") print(\"0", "1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\":", "elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER", "subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif", "elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER", "\"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"4\": while", "while _opt != '0': print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise Removal\") print(\"2", "\" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt ==", "- Use Mean Filter\") print(\"c - Visualize Mean Filter Anchor Effect\") print(\"d -", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif", "\" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt ==", "_opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER +", "\"0\": 
print(\"---------------- Noise Remove ----------------\") print(\"a - Plot Original Pictures\") print(\"b - Use", "elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt", "_opt != \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize Data Type Problems", "elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER", "print(\"d - Visualize the Effect of Harris Free Parameter on Harris\") print(\"e -", "\" 1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt ==", "of Sobel Kernel Aperture on Harris\") print(\"d - Visualize the Effect of Harris", "\"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a - Apply the Custom Harris Corner Detector", "+ \" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt =", "pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"3\": while _opt", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif", "- Use Bilateral Filter\") print(\"h - Visualize Bilateral Filter Sigma Effect\") print(\"0 -", "while _opt != \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize Data Type", "Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if", "\"3\": while _opt != \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a - Apply the", "Extraction\") print(\"3 - Corner Detection\") print(\"4 - Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\")", "\"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": 
subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\")", "_opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \"", "Detector to an image\") print(\"b - Compare the OpenCV and Custom Harris results\")", "Sweeping the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt =", "1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt == \"h\":", "print(\"4 - Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt =", "Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt", "an Animation Sweeping the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\")", "== \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else: print(\"Invalid", "Type Problems on Edge Detection\") print(\"b - Use Sobel Operator\") print(\"c - Use", "Kernel\") print(\"d - Use Scharr 3x3 Kernel and Apply Threshold\") print(\"e - Use", "print(\"e - Generate an Animation Sweeping the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\")", "== \"1\": while _opt != \"0\": print(\"---------------- Noise Remove ----------------\") print(\"a - Plot", "print(\"a - Visualize Data Type Problems on Edge Detection\") print(\"b - Use Sobel", "Visualize the Effect of Sobel Kernel Aperture on Harris\") print(\"d - Visualize the", "= sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while _opt != '0': print(\"----------------", "Filter\") print(\"h - Visualize Bilateral Filter Sigma Effect\") print(\"0 - Exit\") 
print(\"----------------------------------------------\") _opt", "str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\":", "_opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER +", "if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER", "subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt", "str(_opt) if _opt == \"1\": while _opt != \"0\": print(\"---------------- Noise Remove ----------------\")", "elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass", "Effect of Sobel Kernel Aperture on Harris\") print(\"d - Visualize the Effect of", "Canny\") print(\"i - Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\")", "print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"1\": while _opt", "Filter\") print(\"f - Visualize Gaussian Filter Sigma Effect\") print(\"g - Use Bilateral Filter\")", "print(\"b - Use Sobel Operator\") print(\"c - Use Scharr 3x3 Kernel\") print(\"d -", "image\") print(\"b - Compare the OpenCV and Custom Harris results\") print(\"0 - Exit\")", "= \"\" elif _opt == \"4\": while _opt != \"0\": print(\"-------------------- Custom Harris", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt", "to an image\") print(\"b - Visualize the Effect of Block Size on Harris\")", "Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER", "Effect\") 
print(\"d - Use Median Filter\") print(\"e - Use Gaussian Filter\") print(\"f -", "elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER", "\"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\")", "subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt", "subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif", "+ \" 1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt", "elif _opt == \"2\": while _opt != \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a", "= input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\")", "Remove ----------------\") print(\"a - Plot Original Pictures\") print(\"b - Use Mean Filter\") print(\"c", "Anchor Effect\") print(\"d - Use Median Filter\") print(\"e - Use Gaussian Filter\") print(\"f", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif", "Option!\") _opt = \"\" elif _opt == \"4\": while _opt != \"0\": print(\"--------------------", "Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if", "the Effect of Sobel Kernel Aperture on Harris\") print(\"d - Visualize the Effect", "\" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt ==", "the Custom Harris Corner Detector to an image\") print(\"b - Compare the 
OpenCV", "an image\") print(\"b - Compare the OpenCV and Custom Harris results\") print(\"0 -", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt", "_opt == \"3\": while _opt != \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a -", "print(\"a - Apply the Custom Harris Corner Detector to an image\") print(\"b -", "input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif", "subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif", "\" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\"", "\"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\")", "Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt", "Use Gaussian Filter\") print(\"f - Visualize Gaussian Filter Sigma Effect\") print(\"g - Use", "elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER", "input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif", "== \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \"", "+ \" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt", "Filter Sigma Effect\") print(\"g - Use Bilateral Filter\") print(\"h - Visualize Bilateral Filter", "Use Scharr 3x3 Kernel\") print(\"d - Use Scharr 3x3 Kernel and Apply Threshold\")", "_opt = 
\"\" elif _opt == \"2\": while _opt != \"0\": print(\"---------------- Edge", "elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass", "\"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\")", "\" 1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt ==", "1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt == \"g\":", "== \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \"", "subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif", "- Use Scharr 3x3 Kernel and Apply Threshold\") print(\"e - Use Prewitt Filter\")", "== \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \"", "Custom Harris Corner Detector to an image\") print(\"b - Compare the OpenCV and", "Problems on Edge Detection\") print(\"b - Use Sobel Operator\") print(\"c - Use Scharr", "1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\":", "1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\":", "!= \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize Data Type Problems on", "+ \" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt", "\" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\"", "Effect 
of Harris Free Parameter on Harris\") print(\"e - Generate an Animation Sweeping", "+ \" 1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt", "Use Prewitt Filter\") print(\"f - Use Roberts Filter\") print(\"g - Use Canny Edge", "_PYTHON_INTERPRETER = sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while _opt != '0':", "\"0\": print(\"---------------- Corner Detection ----------------\") print(\"a - Apply the Harris to an image\")", "Edge Extraction\") print(\"3 - Corner Detection\") print(\"4 - Custom Harris\") print(\"0 - Exit\")", "1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt == \"c\":", "Effect\") print(\"g - Use Bilateral Filter\") print(\"h - Visualize Bilateral Filter Sigma Effect\")", "== \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \"", "print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER +", "Visualize the Effect of Harris Free Parameter on Harris\") print(\"e - Generate an", "elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER", "1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt == \"i\":", "+ \" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt", "_opt != \"0\": print(\"---------------- Noise Remove ----------------\") print(\"a - Plot Original Pictures\") print(\"b", "results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt", "Removal\") print(\"2 - Edge Extraction\") print(\"3 - 
Corner Detection\") print(\"4 - Custom Harris\")", "- Noise Removal\") print(\"2 - Edge Extraction\") print(\"3 - Corner Detection\") print(\"4 -", "== \"3\": while _opt != \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a - Apply", "of Harris Free Parameter on Harris\") print(\"e - Generate an Animation Sweeping the", "Original Pictures\") print(\"b - Use Mean Filter\") print(\"c - Visualize Mean Filter Anchor", "pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"4\": while _opt", "== \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \"", "else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"0\": pass else: print(\"Invalid", "str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt == \"b\":", "print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER +", "\"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\")", "Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"1\": while", "- Use Gaussian Filter\") print(\"f - Visualize Gaussian Filter Sigma Effect\") print(\"g -", "1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt == \"f\":", "== \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \"", "subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif", "Harris Free Parameter on Harris\") print(\"e - Generate an Animation Sweeping the Harris", "Custom Harris 
results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt)", "\" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt ==", "= sys.path[0] _opt = \"\" while _opt != '0': print(\"---------------- MAIN MENU ----------------\")", "elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER", "sys _PYTHON_INTERPRETER = sys.executable _CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while _opt !=", "Compare the OpenCV and Custom Harris results\") print(\"0 - Exit\") print(\"-------------------------------------------------------\") _opt =", "MENU ----------------\") print(\"1 - Noise Removal\") print(\"2 - Edge Extraction\") print(\"3 - Corner", "Animation of Canny\") print(\"i - Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt", "Animation Sweeping the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt", "_opt != '0': print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise Removal\") print(\"2 -", "Generate an Animation Sweeping the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt =", "elif _opt == \"4\": while _opt != \"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a", "Use Roberts Filter\") print(\"g - Use Canny Edge Detector\") print(\"h - Generate an", "Prewitt Filter\") print(\"f - Use Roberts Filter\") print(\"g - Use Canny Edge Detector\")", "elif _opt == \"3\": while _opt != \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a", "Apply Threshold\") print(\"e - Use Prewitt Filter\") print(\"f - Use Roberts Filter\") print(\"g", "\"0\": pass else: print(\"Invalid 
Option!\") _opt = \"\" elif _opt == \"2\": while", "Parameter on Harris\") print(\"e - Generate an Animation Sweeping the Harris Parameters\") print(\"0", "Custom Harris --------------------\") print(\"a - Apply the Custom Harris Corner Detector to an", "Block Size on Harris\") print(\"c - Visualize the Effect of Sobel Kernel Aperture", "1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt == \"e\":", "Filter\") print(\"c - Visualize Mean Filter Anchor Effect\") print(\"d - Use Median Filter\")", "pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"0\": pass else:", "elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER", "\"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\")", "- Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt)", "print(\"---------------- Corner Detection ----------------\") print(\"a - Apply the Harris to an image\") print(\"b", "print(\"d - Use Median Filter\") print(\"e - Use Gaussian Filter\") print(\"f - Visualize", "print(\"1 - Noise Removal\") print(\"2 - Edge Extraction\") print(\"3 - Corner Detection\") print(\"4", "Use Sobel Operator\") print(\"c - Use Scharr 3x3 Kernel\") print(\"d - Use Scharr", "- Visualize Mean Filter Anchor Effect\") print(\"d - Use Median Filter\") print(\"e -", "elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER", "print(\"g - Use Bilateral Filter\") print(\"h - Visualize Bilateral Filter Sigma Effect\") print(\"0", "+ \" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt", "- 
Use Scharr 3x3 Kernel\") print(\"d - Use Scharr 3x3 Kernel and Apply", "print(\"f - Visualize Gaussian Filter Sigma Effect\") print(\"g - Use Bilateral Filter\") print(\"h", "Kernel and Apply Threshold\") print(\"e - Use Prewitt Filter\") print(\"f - Use Roberts", "_opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass else:", "_CURRENT_DIRECTORY = sys.path[0] _opt = \"\" while _opt != '0': print(\"---------------- MAIN MENU", "- Corner Detection\") print(\"4 - Custom Harris\") print(\"0 - Exit\") print(\"-------------------------------------------\") _opt =", "an image\") print(\"b - Visualize the Effect of Block Size on Harris\") print(\"c", "\"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\")", "\" 1.1/gaussian-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter-sigma.py\") elif _opt ==", "== \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \"", "== \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \"", "print(\"e - Use Prewitt Filter\") print(\"f - Use Roberts Filter\") print(\"g - Use", "== \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \"", "+ \" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif _opt", "Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER", "\" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\") elif _opt ==", "print(\"c - Use Scharr 3x3 Kernel\") print(\"d - Use 
Scharr 3x3 Kernel and", "\" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\"", "----------------\") print(\"a - Visualize Data Type Problems on Edge Detection\") print(\"b - Use", "== \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \"", "Filter\") print(\"g - Use Canny Edge Detector\") print(\"h - Generate an Animation of", "Option!\") _opt = \"\" elif _opt == \"2\": while _opt != \"0\": print(\"----------------", "== \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \"", "_opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt ==", "Visualize Bilateral Filter Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt", "on Harris\") print(\"c - Visualize the Effect of Sobel Kernel Aperture on Harris\")", "Extraction ----------------\") print(\"a - Visualize Data Type Problems on Edge Detection\") print(\"b -", "Visualize Mean Filter Anchor Effect\") print(\"d - Use Median Filter\") print(\"e - Use", "Exit\") print(\"-------------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER", "_opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER +", "Kernel Aperture on Harris\") print(\"d - Visualize the Effect of Harris Free Parameter", "\"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter-threshold.py\")", "+ \" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt", "print(\"Invalid Option!\") _opt = \"\" elif _opt == \"4\": while _opt 
!= \"0\":", "== \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"0\":", "Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt", "_opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else:", "Bilateral Filter\") print(\"h - Visualize Bilateral Filter Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\")", "1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt == \"h\":", "== \"2\": while _opt != \"0\": print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize", "Harris Corner Detector to an image\") print(\"b - Compare the OpenCV and Custom", "Effect of Block Size on Harris\") print(\"c - Visualize the Effect of Sobel", "and Apply Threshold\") print(\"e - Use Prewitt Filter\") print(\"f - Use Roberts Filter\")", "Option!\") _opt = \"\" elif _opt == \"3\": while _opt != \"0\": print(\"----------------", "Aperture on Harris\") print(\"d - Visualize the Effect of Harris Free Parameter on", "_opt = \"\" while _opt != '0': print(\"---------------- MAIN MENU ----------------\") print(\"1 -", "- Visualize Gaussian Filter Sigma Effect\") print(\"g - Use Bilateral Filter\") print(\"h -", "- Generate an Animation Sweeping the Harris Parameters\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt", "\" 1.1/gaussian-filter-sigma.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt ==", "Use Median Filter\") print(\"e - Use Gaussian Filter\") print(\"f - Visualize Gaussian Filter", "\"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\")", "== \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"4\":", 
"Harris --------------------\") print(\"a - Apply the Custom Harris Corner Detector to an image\")", "- Edge Extraction\") print(\"3 - Corner Detection\") print(\"4 - Custom Harris\") print(\"0 -", "_opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER +", "print(\"g - Use Canny Edge Detector\") print(\"h - Generate an Animation of Canny\")", "\" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt ==", "_opt == \"4\": while _opt != \"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a -", "\" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt ==", "Bilateral Filter Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt =", "- Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\":", "sys.path[0] _opt = \"\" while _opt != '0': print(\"---------------- MAIN MENU ----------------\") print(\"1", "_opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER +", "the Effect of Harris Free Parameter on Harris\") print(\"e - Generate an Animation", "Filter\") print(\"f - Use Roberts Filter\") print(\"g - Use Canny Edge Detector\") print(\"h", "= \"\" elif _opt == \"2\": while _opt != \"0\": print(\"---------------- Edge Extraction", "_opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER +", "my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt == \"b\": 
subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif", "print(\"---------------- Edge Extraction ----------------\") print(\"a - Visualize Data Type Problems on Edge Detection\")", "= input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\")", "Noise Remove ----------------\") print(\"a - Plot Original Pictures\") print(\"b - Use Mean Filter\")", "= str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt ==", "1.2/laplacian-filter.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif", "Filter Sigma Effect\") print(\"0 - Exit\") print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt)", "while _opt != \"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a - Apply the Custom", "pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"2\": while _opt", "\"b\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris-compare.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\")", "\"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"3\": while", "+ \" 1.2/sobel-filter-ddepth.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter.py\") elif _opt", "print(\"d - Use Scharr 3x3 Kernel and Apply Threshold\") print(\"e - Use Prewitt", "print(\"f - Use Roberts Filter\") print(\"g - Use Canny Edge Detector\") print(\"h -", "Sigma Effect\") print(\"g - Use Bilateral Filter\") print(\"h - Visualize Bilateral Filter Sigma", "Sobel Kernel Aperture on Harris\") print(\"d - Visualize the Effect of Harris Free", "\"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt == \"f\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/roberts-filter.py\")", "print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = 
str(_opt) if _opt ==", "print(\"---------------- Noise Remove ----------------\") print(\"a - Plot Original Pictures\") print(\"b - Use Mean", "Mean Filter\") print(\"c - Visualize Mean Filter Anchor Effect\") print(\"d - Use Median", "Roberts Filter\") print(\"g - Use Canny Edge Detector\") print(\"h - Generate an Animation", "Harris\") print(\"d - Visualize the Effect of Harris Free Parameter on Harris\") print(\"e", "1.2/sobel-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/scharr-filter.py\") elif _opt == \"d\":", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif", "== \"4\": while _opt != \"0\": print(\"-------------------- Custom Harris --------------------\") print(\"a - Apply", "str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/original-pictures.py\") elif _opt == \"b\":", "print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise Removal\") print(\"2 - Edge Extraction\") print(\"3", "print(\"a - Plot Original Pictures\") print(\"b - Use Mean Filter\") print(\"c - Visualize", "Corner Detector to an image\") print(\"b - Compare the OpenCV and Custom Harris", "1.2/scharr-filter-threshold.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/prewitt-filter.py\") elif _opt == \"f\":", "!= \"0\": print(\"---------------- Noise Remove ----------------\") print(\"a - Plot Original Pictures\") print(\"b -", "== \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \"", "subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/mean-filter-anchor.py\") elif", "Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt)", "Apply the Harris to an 
image\") print(\"b - Visualize the Effect of Block", "Free Parameter on Harris\") print(\"e - Generate an Animation Sweeping the Harris Parameters\")", "+ \" 1.2/canny-filter.py\") elif _opt == \"h\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter-animate.py\") elif _opt", "!= \"0\": print(\"---------------- Corner Detection ----------------\") print(\"a - Apply the Harris to an", "Detector\") print(\"h - Generate an Animation of Canny\") print(\"i - Use Laplacian Filter\")", "Use Bilateral Filter\") print(\"h - Visualize Bilateral Filter Sigma Effect\") print(\"0 - Exit\")", "Visualize the Effect of Block Size on Harris\") print(\"c - Visualize the Effect", "\" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-bsize.py\") elif _opt ==", "= str(_opt) if _opt == \"1\": while _opt != \"0\": print(\"---------------- Noise Remove", "_opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" my_Harris/my-harris.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER +", "+ \" 1.1/median-filter.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/gaussian-filter.py\") elif _opt", "= \"\" elif _opt == \"3\": while _opt != \"0\": print(\"---------------- Corner Detection", "= str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/sobel-filter-ddepth.py\") elif _opt ==", "+ \" 1.3/harris-detector-bsize.py\") elif _opt == \"c\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-ksize.py\") elif _opt", "Visualize Data Type Problems on Edge Detection\") print(\"b - Use Sobel Operator\") print(\"c", "print(\"b - Visualize the Effect of Block Size on Harris\") print(\"c - Visualize", "+ \" 1.1/mean-filter-anchor.py\") elif _opt == \"d\": subprocess.run(_PYTHON_INTERPRETER + \" 1.1/median-filter.py\") elif _opt", "of Canny\") print(\"i - Use Laplacian Filter\") print(\"0 - Exit\") print(\"-------------------------------------------------\") _opt =", "\"h\": 
subprocess.run(_PYTHON_INTERPRETER + \" 1.1/bilateral-filter-sigma.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\")", "Corner Detection ----------------\") print(\"a - Apply the Harris to an image\") print(\"b -", "print(\"----------------------------------------------\") _opt = input(\"Option:\\t\") _opt = str(_opt) if _opt == \"a\": subprocess.run(_PYTHON_INTERPRETER +", "!= '0': print(\"---------------- MAIN MENU ----------------\") print(\"1 - Noise Removal\") print(\"2 - Edge", "+ \" 1.2/roberts-filter.py\") elif _opt == \"g\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/canny-filter.py\") elif _opt", "print(\"Invalid Option!\") _opt = \"\" elif _opt == \"2\": while _opt != \"0\":", "+ \" 1.3/harris-detector-k.py\") elif _opt == \"e\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector-animate.py\") elif _opt", "Size on Harris\") print(\"c - Visualize the Effect of Sobel Kernel Aperture on", "Mean Filter Anchor Effect\") print(\"d - Use Median Filter\") print(\"e - Use Gaussian", "elif _opt == \"i\": subprocess.run(_PYTHON_INTERPRETER + \" 1.2/laplacian-filter.py\") elif _opt == \"0\": pass", "+ \" 1.3/harris-detector-animate.py\") elif _opt == \"0\": pass else: print(\"Invalid Option!\") _opt =", "== \"0\": pass else: print(\"Invalid Option!\") _opt = \"\" elif _opt == \"2\":", "- Visualize the Effect of Harris Free Parameter on Harris\") print(\"e - Generate", "== \"a\": subprocess.run(_PYTHON_INTERPRETER + \" 1.3/harris-detector.py\") elif _opt == \"b\": subprocess.run(_PYTHON_INTERPRETER + \"", "to an image\") print(\"b - Compare the OpenCV and Custom Harris results\") print(\"0" ]
[ "in range(len(row)): if i != 0: output += \" \" + str(row[i]) output", "1: row[4] = 2 else: row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0:", "in pedArray: output = str(row[0]) for i in range(len(row)): if i != 0:", "1: row[5] = 2 else: row[5] = -9 file = open(sys.argv[3], 'a') output", "for row in reader: row.pop() if count != 0: readRow = [i for", "readRow count += 1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray = [] for", "if csvArray[row[1]][5] == 1: row[4] = 2 else: row[4] = 1 if max(csvArray[row[1]][9],", "pedArray: output = str(row[0]) for i in range(len(row)): if i != 0: output", "output = str(row[0]) for i in range(len(row)): if i != 0: output +=", "= 2 else: row[5] = -9 file = open(sys.argv[3], 'a') output = \"\"", "csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] = -9 if csvArray[i][9] == '': csvArray[i][9]", "user from a CSV file.\"\"\" count = 0 fileName = sys.argv[1] csvArray =", "remove.\"\"\" rmArray = [] for i in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12]", "row] csvArray[readRow[2]] = readRow count += 1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray", "-9 if csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\" pedArray", "= 2 else: row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] =", "data passed by the user from a CSV file.\"\"\" count = 0 fileName", "= 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2 else: row[5] =", "= -9 file = open(sys.argv[3], 'a') output = \"\" for row in pedArray:", "in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] = -9 if csvArray[i][9] == '':", "row in pedArray: output = str(row[0]) for i in range(len(row)): if i !=", "file = open(sys.argv[3], 'a') output = \"\" for row in pedArray: output =", "in row] csvArray[readRow[2]] = readRow count += 1 csvFile.close() \"\"\"Find rows to remove.\"\"\"", "= -9 \"\"\"Read in ped file\"\"\" pedArray = [] for line in open(sys.argv[2]):", "if 
count != 0: readRow = [i for i in row] csvArray[readRow[2]] =", "row[5] = 2 else: row[5] = -9 file = open(sys.argv[3], 'a') output =", "1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray = [] for i in csvArray.keys():", "2 else: row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1", "row in pedArray: if csvArray[row[1]][5] == 1: row[4] = 2 else: row[4] =", "file\"\"\" pedArray = [] for line in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray:", "reader = csv.reader(csvFile) for row in reader: row.pop() if count != 0: readRow", "row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2 else: row[5]", "!= 0: readRow = [i for i in row] csvArray[readRow[2]] = readRow count", "== '': csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\" pedArray = [] for", "the user from a CSV file.\"\"\" count = 0 fileName = sys.argv[1] csvArray", "== 0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2", "csv import sys csv.field_size_limit(sys.maxsize) \"\"\"Reads in data passed by the user from a", "output = \"\" for row in pedArray: output = str(row[0]) for i in", "max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5]", "-9 \"\"\"Read in ped file\"\"\" pedArray = [] for line in open(sys.argv[2]): pedArray.append(line.split())", "for row in pedArray: if csvArray[row[1]][5] == 1: row[4] = 2 else: row[4]", "in pedArray: if csvArray[row[1]][5] == 1: row[4] = 2 else: row[4] = 1", "for i in range(len(row)): if i != 0: output += \" \" +", "row[5] = -9 file = open(sys.argv[3], 'a') output = \"\" for row in", "= [i for i in row] csvArray[readRow[2]] = readRow count += 1 csvFile.close()", "csvFile: reader = csv.reader(csvFile) for row in reader: row.pop() if count != 0:", "in reader: row.pop() if count != 0: readRow = [i for i in", "-9 file = open(sys.argv[3], 'a') output = \"\" for row in pedArray: output", 
"csvArray[readRow[2]] = readRow count += 1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray =", "count += 1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray = [] for i", "in data passed by the user from a CSV file.\"\"\" count = 0", "count = 0 fileName = sys.argv[1] csvArray = {} with open(fileName) as csvFile:", "\"\"\"Find rows to remove.\"\"\" rmArray = [] for i in csvArray.keys(): if csvArray[i][12]", "csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\" pedArray = []", "rows to remove.\"\"\" rmArray = [] for i in csvArray.keys(): if csvArray[i][12] ==", "csvArray[i][12] = -9 if csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read in ped", "open(sys.argv[3], 'a') output = \"\" for row in pedArray: output = str(row[0]) for", "csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\" pedArray = [] for line in", "i != 0: output += \" \" + str(row[i]) output += \"\\n\" file.write(output)", "readRow = [i for i in row] csvArray[readRow[2]] = readRow count += 1", "csvArray[row[1]][12]) == 0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] =", "\"\"\"Reads in data passed by the user from a CSV file.\"\"\" count =", "open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5] == 1: row[4] = 2", "row[4] = 2 else: row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5]", "if i != 0: output += \" \" + str(row[i]) output += \"\\n\"", "= {} with open(fileName) as csvFile: reader = csv.reader(csvFile) for row in reader:", "= 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12])", "csvArray[i][12] == '': csvArray[i][12] = -9 if csvArray[i][9] == '': csvArray[i][9] = -9", "i in range(len(row)): if i != 0: output += \" \" + str(row[i])", "== 1: row[5] = 2 else: row[5] = -9 file = open(sys.argv[3], 'a')", "to remove.\"\"\" rmArray = [] for i in csvArray.keys(): if csvArray[i][12] == '':", "= 0 fileName = 
sys.argv[1] csvArray = {} with open(fileName) as csvFile: reader", "{} with open(fileName) as csvFile: reader = csv.reader(csvFile) for row in reader: row.pop()", "for i in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] = -9 if csvArray[i][9]", "if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1:", "+= 1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray = [] for i in", "max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2 else: row[5] = -9 file =", "0: readRow = [i for i in row] csvArray[readRow[2]] = readRow count +=", "reader: row.pop() if count != 0: readRow = [i for i in row]", "from a CSV file.\"\"\" count = 0 fileName = sys.argv[1] csvArray = {}", "i in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] = -9 if csvArray[i][9] ==", "str(row[0]) for i in range(len(row)): if i != 0: output += \" \"", "for i in row] csvArray[readRow[2]] = readRow count += 1 csvFile.close() \"\"\"Find rows", "by the user from a CSV file.\"\"\" count = 0 fileName = sys.argv[1]", "[i for i in row] csvArray[readRow[2]] = readRow count += 1 csvFile.close() \"\"\"Find", "csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray = [] for i in csvArray.keys(): if", "as csvFile: reader = csv.reader(csvFile) for row in reader: row.pop() if count !=", "if csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\" pedArray =", "1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2 else: row[5] = -9", "i in row] csvArray[readRow[2]] = readRow count += 1 csvFile.close() \"\"\"Find rows to", "= \"\" for row in pedArray: output = str(row[0]) for i in range(len(row)):", "row in reader: row.pop() if count != 0: readRow = [i for i", "row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1 elif max(csvArray[row[1]][9],", "passed by the user from a CSV file.\"\"\" count = 0 fileName =", "count != 0: readRow = [i for i in row] 
csvArray[readRow[2]] = readRow", "[] for line in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5] ==", "open(fileName) as csvFile: reader = csv.reader(csvFile) for row in reader: row.pop() if count", "for row in pedArray: output = str(row[0]) for i in range(len(row)): if i", "file.\"\"\" count = 0 fileName = sys.argv[1] csvArray = {} with open(fileName) as", "csvArray[row[1]][12]) == 1: row[5] = 2 else: row[5] = -9 file = open(sys.argv[3],", "= -9 if csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\"", "import csv import sys csv.field_size_limit(sys.maxsize) \"\"\"Reads in data passed by the user from", "elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2 else: row[5] = -9 file", "pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5] == 1: row[4] = 2 else:", "= open(sys.argv[3], 'a') output = \"\" for row in pedArray: output = str(row[0])", "CSV file.\"\"\" count = 0 fileName = sys.argv[1] csvArray = {} with open(fileName)", "in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5] == 1: row[4] =", "csv.reader(csvFile) for row in reader: row.pop() if count != 0: readRow = [i", "= str(row[0]) for i in range(len(row)): if i != 0: output += \"", "= [] for i in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] = -9", "else: row[5] = -9 file = open(sys.argv[3], 'a') output = \"\" for row", "row.pop() if count != 0: readRow = [i for i in row] csvArray[readRow[2]]", "csvArray = {} with open(fileName) as csvFile: reader = csv.reader(csvFile) for row in", "1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) ==", "pedArray: if csvArray[row[1]][5] == 1: row[4] = 2 else: row[4] = 1 if", "if csvArray[i][12] == '': csvArray[i][12] = -9 if csvArray[i][9] == '': csvArray[i][9] =", "import sys csv.field_size_limit(sys.maxsize) \"\"\"Reads in data passed by the user 
from a CSV", "line in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5] == 1: row[4]", "fileName = sys.argv[1] csvArray = {} with open(fileName) as csvFile: reader = csv.reader(csvFile)", "= sys.argv[1] csvArray = {} with open(fileName) as csvFile: reader = csv.reader(csvFile) for", "= [] for line in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5]", "'': csvArray[i][12] = -9 if csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read in", "0 fileName = sys.argv[1] csvArray = {} with open(fileName) as csvFile: reader =", "sys.argv[1] csvArray = {} with open(fileName) as csvFile: reader = csv.reader(csvFile) for row", "rmArray = [] for i in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] =", "else: row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) == 0: row[5] = 1 elif", "\"\" for row in pedArray: output = str(row[0]) for i in range(len(row)): if", "with open(fileName) as csvFile: reader = csv.reader(csvFile) for row in reader: row.pop() if", "= csv.reader(csvFile) for row in reader: row.pop() if count != 0: readRow =", "== 1: row[4] = 2 else: row[4] = 1 if max(csvArray[row[1]][9], csvArray[row[1]][12]) ==", "for line in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if csvArray[row[1]][5] == 1:", "in ped file\"\"\" pedArray = [] for line in open(sys.argv[2]): pedArray.append(line.split()) for row", "range(len(row)): if i != 0: output += \" \" + str(row[i]) output +=", "pedArray = [] for line in open(sys.argv[2]): pedArray.append(line.split()) for row in pedArray: if", "sys csv.field_size_limit(sys.maxsize) \"\"\"Reads in data passed by the user from a CSV file.\"\"\"", "csv.field_size_limit(sys.maxsize) \"\"\"Reads in data passed by the user from a CSV file.\"\"\" count", "'': csvArray[i][9] = -9 \"\"\"Read in ped file\"\"\" pedArray = [] for line", "csvArray[row[1]][5] == 1: row[4] = 2 else: row[4] = 1 if max(csvArray[row[1]][9], 
csvArray[row[1]][12])", "\"\"\"Read in ped file\"\"\" pedArray = [] for line in open(sys.argv[2]): pedArray.append(line.split()) for", "ped file\"\"\" pedArray = [] for line in open(sys.argv[2]): pedArray.append(line.split()) for row in", "2 else: row[5] = -9 file = open(sys.argv[3], 'a') output = \"\" for", "= readRow count += 1 csvFile.close() \"\"\"Find rows to remove.\"\"\" rmArray = []", "[] for i in csvArray.keys(): if csvArray[i][12] == '': csvArray[i][12] = -9 if", "== '': csvArray[i][12] = -9 if csvArray[i][9] == '': csvArray[i][9] = -9 \"\"\"Read", "'a') output = \"\" for row in pedArray: output = str(row[0]) for i", "a CSV file.\"\"\" count = 0 fileName = sys.argv[1] csvArray = {} with", "0: row[5] = 1 elif max(csvArray[row[1]][9], csvArray[row[1]][12]) == 1: row[5] = 2 else:" ]
[ "\\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text = header + all_articles with open('README.md',", "%H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/',", "= sorted(merged, key=lambda x: x[-1], reverse=True) all_articles = '' for article in output[:TOP_N]:", "+ '\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' +", "return dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output = [] for entry", "%d, %Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml',", "'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml',", "'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 
'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250", "time import os import itertools import feedparser import urllib.parse import dateutil.parser import signal", "signum) sys.exit('Took too long.. Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt", "output = [] for entry in feed.entries: if entry.updated_parsed is None: article =", "'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N", "dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time))", "\"\"\" @author: Jimit.Dholakia \"\"\" from datetime import datetime, timedelta import time import os", "= dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 =", "time.localtime()) action_badge = '' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)'", "import feedparser import urllib.parse import dateutil.parser import signal import sys os.environ['TZ'] = 
'Asia/Kolkata'", "list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged, key=lambda x: x[-1], reverse=True) all_articles =", "+ \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article = '<p><a href=\"'", "output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url) except Exception as e:", "import signal import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT", "+ feed.feed.title + \\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>'", "article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)'", "' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article", "' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article = '<p><a", "'\\n\\n' complete_text = header + all_articles with open('README.md', 'w') as f: f.write(complete_text) print('RSS", "'' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium:", "medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text =", "else: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title +", "'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 
'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum, frame): print('Signal handler", "feedparser import urllib.parse import dateutil.parser import signal import sys os.environ['TZ'] = 'Asia/Kolkata' if", "print('Fetching complete for ' + feed_url) except Exception as e: print('Fetching failed for", "' + feed_url) except Exception as e: print('Fetching failed for ' + feed_url", "reverse=True) all_articles = '' for article in output[:TOP_N]: all_articles += article[0] current_time =", "by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge", "= list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged, key=lambda x: x[-1], reverse=True) all_articles", "complete_text = header + all_articles with open('README.md', 'w') as f: f.write(complete_text) print('RSS Feeds", "article in output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = ''", "print('Fetching failed for ' + feed_url + '\\tException: ' + str(e)) print('Exception:', str(e))", "= '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' +", "%Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 
'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/',", "signal import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT =", "sys os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT = '%b %d,", "as e: print('Fetching failed for ' + feed_url + '\\tException: ' + str(e))", "href=\"' + entry.link + '\" target=\"_blank\">' + entry.title + '</a><br/>' + feed.feed.title +", "+ feed_url + '\\tException: ' + str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler)", "x[-1], reverse=True) all_articles = '' for article in output[:TOP_N]: all_articles += article[0] current_time", "import itertools import feedparser import urllib.parse import dateutil.parser import signal import sys os.environ['TZ']", "'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/',", "'</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url) except Exception as", "failed for ' + feed_url + '\\tException: ' + str(e)) print('Exception:', str(e)) return", "= 250 def handler(signum, frame): print('Signal handler called with signal', signum) sys.exit('Took too", "-*- coding: utf-8 -*- \"\"\" @author: Jimit.Dholakia \"\"\" from datetime import datetime, timedelta", "\\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed)))", "+ \\ 
convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for '", "'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/',", "time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn:", "'</a><br/>' + feed.feed.title + \\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) +", "feed_url) except Exception as e: print('Fetching failed for ' + feed_url + '\\tException:", "output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge =", "+ '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text = header", "'%b %d, %Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/',", "= list(set(merged)) output = sorted(merged, key=lambda x: x[-1], reverse=True) all_articles = '' for", "'' for article in output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT, 
time.localtime()) action_badge", "for article in output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge =", "+ '-yellowgreen)' + '\\n\\n' complete_text = header + all_articles with open('README.md', 'w') as", "convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url)", "+ \\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article,", "+ entry.link + '\" target=\"_blank\">' + entry.title + '</a><br/>' + feed.feed.title + \\", "'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum, frame): print('Signal handler called with signal',", "for ' + feed_url + '\\tException: ' + str(e)) print('Exception:', str(e)) return output", "['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed',", "e: print('Fetching failed for ' + feed_url + '\\tException: ' + str(e)) print('Exception:',", "Exception as e: print('Fetching failed for ' + feed_url + '\\tException: ' +", "| ' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article =", "import datetime, timedelta import time import os import itertools import feedparser import urllib.parse", "'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum, frame): print('Signal handler called 
with", "if entry.updated_parsed is None: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">'", "'[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' + maintainer_badge + '\\n' + linkedin_badge", "coding: utf-8 -*- \"\"\" @author: Jimit.Dholakia \"\"\" from datetime import datetime, timedelta import", "for ' + feed_url) except Exception as e: print('Fetching failed for ' +", "return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result))", "in feed.entries: if entry.updated_parsed is None: article = '<p><a href=\"' + entry.link +", "linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n'", "header = action_badge + '\\n' + maintainer_badge + '\\n' + linkedin_badge + '\\n'", "medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' + maintainer_badge + '\\n'", "'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum, frame): print('Signal", "time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml',", "key=lambda x: x[-1], reverse=True) all_articles = '' for article in output[:TOP_N]: all_articles +=", "= '[![Medium: 
jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' + maintainer_badge + '\\n' +", "feed.feed.title + \\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append(", "+ '</a><br/>' + feed.feed.title + \\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT)", "'\\tException: ' + str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result =", "all_articles += article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge = '[![Maintained", "maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)'", "TOP_N = 250 def handler(signum, frame): print('Signal handler called with signal', signum) sys.exit('Took", "= datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed", "complete for ' + feed_url) except Exception as e: print('Fetching failed for '", "from datetime import datetime, timedelta import time import os import itertools import feedparser", "RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged, key=lambda x:", "+ maintainer_badge + '\\n' + linkedin_badge + '\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-'", "is None: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title", "maintainer_badge + '\\n' + linkedin_badge + '\\n' + medium_badge + '\\n![Last 
Updated](https://img.shields.io/badge/Last%20Updated%20on-' +", "dt = dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt", "TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article = '<p><a href=\"' + entry.link +", "+ '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url) except Exception", "timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output = [] for", "= [] for entry in feed.entries: if entry.updated_parsed is None: article = '<p><a", "= feedparser.parse(feed_url) output = [] for entry in feed.entries: if entry.updated_parsed is None:", "except Exception as e: print('Fetching failed for ' + feed_url + '\\tException: '", "dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try:", "output = sorted(merged, key=lambda x: x[-1], reverse=True) all_articles = '' for article in", "signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged)) output", "<reponame>jimit105/rss-feeds-articles<gh_stars>1-10 # -*- coding: utf-8 -*- \"\"\" @author: Jimit.Dholakia \"\"\" from datetime import", "parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article = '<p><a href=\"' + entry.link", "itertools import feedparser import urllib.parse import dateutil.parser import signal import sys os.environ['TZ'] =", "def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def", "os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT = '%b %d, %Y", "= '%b %d, %Y %H:%M:%S %Z' RSS_FEEDS = 
['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/',", "linkedin_badge + '\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)'", "feed = feedparser.parse(feed_url) output = [] for entry in feed.entries: if entry.updated_parsed is", "article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title + '</a><br/>'", "= '<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title + '</a><br/>' +", "RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/',", "convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url) except Exception as e: print('Fetching failed", "signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged =", "'</a><br/>' + feed.feed.title + \\ ' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT) +", "result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged)) output =", "+ '\\tException: ' + str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result", "all_articles = '' for article in output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT,", "os.name != 'nt': time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S %Z' 
RSS_FEEDS =", "handler(signum, frame): print('Signal handler called with signal', signum) sys.exit('Took too long.. Goodbye!') def", "@author: Jimit.Dholakia \"\"\" from datetime import datetime, timedelta import time import os import", "TIME_FMT = '%b %d, %Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml',", "+ '\\n' + linkedin_badge + '\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\", "datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed =", "<NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge +", "= 'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S", "merged = list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged, key=lambda x: x[-1], reverse=True)", "+ '\\n' + maintainer_badge + '\\n' + linkedin_badge + '\\n' + medium_badge +", "sorted(merged, key=lambda x: x[-1], reverse=True) all_articles = '' for article in output[:TOP_N]: all_articles", "urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text = header + all_articles with open('README.md', 'w')", "urllib.parse import dateutil.parser import signal import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name !=", "str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged =", "'\" target=\"_blank\">' + entry.title + 
'</a><br/>' + feed.feed.title + \\ ' | '", "' + feed_url + '\\tException: ' + str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM,", "' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for", "\"\"\" from datetime import datetime, timedelta import time import os import itertools import", "signal', signum) sys.exit('Took too long.. Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 =", "timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone)", "'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum, frame): print('Signal handler called", "parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time):", "convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url):", "feedparser.parse(feed_url) output = [] for entry in feed.entries: if entry.updated_parsed is None: article", "'</p>' output.append((article, parse_date(entry.updated))) else: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">'", "+= article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge = '[![Maintained by", "%Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 
'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml',", "import os import itertools import feedparser import urllib.parse import dateutil.parser import signal import", "'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ]", "feed.entries: if entry.updated_parsed is None: article = '<p><a href=\"' + entry.link + '\"", "= '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header", "= time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge =", "entry.link + '\" target=\"_blank\">' + entry.title + '</a><br/>' + feed.feed.title + \\ '", "entry.updated_parsed is None: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' +", "output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged", "[] for entry in feed.entries: if entry.updated_parsed is None: article = '<p><a href=\"'", "= '' maintainer_badge = 
'[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge =", "import urllib.parse import dateutil.parser import signal import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name", "| ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete", "'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S %Z'", "if os.name != 'nt': time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S %Z' RSS_FEEDS", "'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/'", "Jimit.Dholakia \"\"\" from datetime import datetime, timedelta import time import os import itertools", "'nt': time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml',", "dt2 = dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url)", "= action_badge + '\\n' + maintainer_badge + '\\n' + linkedin_badge + '\\n' +", "+ feed_url) except Exception as e: print('Fetching failed for ' + feed_url +", "+ '\" target=\"_blank\">' + entry.title + '</a><br/>' + feed.feed.title + \\ ' |", "datetime, timedelta import time import os import 
itertools import feedparser import urllib.parse import", "def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output = [] for entry in feed.entries:", "target=\"_blank\">' + entry.title + '</a><br/>' + feed.feed.title + \\ ' | ' +", "called with signal', signum) sys.exit('Took too long.. Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date)", "= list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged,", "\\ ' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else:", "None: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title +", "sys.exit('Took too long.. Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt -", "+ medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text", "+ '\\n\\n' complete_text = header + all_articles with open('README.md', 'w') as f: f.write(complete_text)", "utf-8 -*- \"\"\" @author: Jimit.Dholakia \"\"\" from datetime import datetime, timedelta import time", "+ \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text = header + all_articles with", "current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge", "for entry in feed.entries: if entry.updated_parsed is None: article = '<p><a href=\"' +", "= dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt =", "+ '</a><br/>' + feed.feed.title + \\ ' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT)", "+ '</p>' output.append((article, parse_date(entry.updated))) else: 
article = '<p><a href=\"' + entry.link + '\"", "+ \\ ' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated)))", "in output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT, time.localtime()) action_badge = '' maintainer_badge", "!= 'nt': time.tzset() TIME_FMT = '%b %d, %Y %H:%M:%S %Z' RSS_FEEDS = ['https://jimit105.medium.com/feed',", "action_badge + '\\n' + maintainer_badge + '\\n' + linkedin_badge + '\\n' + medium_badge", "'\\n' + linkedin_badge + '\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time)", "'\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text = header +", "jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' + maintainer_badge + '\\n' + linkedin_badge +", "fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output = [] for entry in feed.entries: if", "import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt': time.tzset() TIME_FMT = '%b", "+ feed.feed.title + \\ ' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>'", "header + all_articles with open('README.md', 'w') as f: f.write(complete_text) print('RSS Feeds Update Complete')", "entry in feed.entries: if entry.updated_parsed is None: article = '<p><a href=\"' + entry.link", "' + str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds,", "'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 
'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N =", "handler called with signal', signum) sys.exit('Took too long.. Goodbye!') def parse_date(input_date): dt =", "import dateutil.parser import signal import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt':", "'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics',", "-*- \"\"\" @author: Jimit.Dholakia \"\"\" from datetime import datetime, timedelta import time import", "dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return dt2", "with signal', signum) sys.exit('Took too long.. Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2", "= '' for article in output[:TOP_N]: all_articles += article[0] current_time = time.strftime(TIME_FMT, time.localtime())", "= header + all_articles with open('README.md', 'w') as f: f.write(complete_text) print('RSS Feeds Update", "too long.. 
Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone)", "entry.title + '</a><br/>' + feed.feed.title + \\ ' | ' + \\ parse_date(entry.updated).strftime(", "- timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output = []", "try: feed = feedparser.parse(feed_url) output = [] for entry in feed.entries: if entry.updated_parsed", "os import itertools import feedparser import urllib.parse import dateutil.parser import signal import sys", "dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output =", "str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0)", "(article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url) except Exception as e: print('Fetching", "TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' + feed_url) except", "'\\n' + maintainer_badge + '\\n' + linkedin_badge + '\\n' + medium_badge + '\\n![Last", "'[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header =", "'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 
'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def", "dt2 = dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2", "+ entry.title + '</a><br/>' + feed.feed.title + \\ ' | ' + \\", "print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged", "dateutil.parser import signal import sys os.environ['TZ'] = 'Asia/Kolkata' if os.name != 'nt': time.tzset()", "= dt - timedelta(seconds=time.timezone) return dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output", "dt2 def fetch_feeds(feed_url): try: feed = feedparser.parse(feed_url) output = [] for entry in", "x: x[-1], reverse=True) all_articles = '' for article in output[:TOP_N]: all_articles += article[0]", "timedelta import time import os import itertools import feedparser import urllib.parse import dateutil.parser", "frame): print('Signal handler called with signal', signum) sys.exit('Took too long.. 
Goodbye!') def parse_date(input_date):", "dt - timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt", "'-yellowgreen)' + '\\n\\n' complete_text = header + all_articles with open('README.md', 'w') as f:", "\\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article, parse_date(entry.updated))) else: article = '<p><a href=\"' +", "parse_date(entry.updated))) else: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title", "- timedelta(seconds=time.timezone) return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt -", "feed_url + '\\tException: ' + str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300)", "list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged, key=lambda", "action_badge = '' maintainer_badge = '[![Maintained by <NAME>](https://img.shields.io/badge/Maintained%20by-jimit105-brightgreen)](https://github.com/jimit105)' linkedin_badge = '[![LinkedIn: jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge", "jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' + maintainer_badge +", "\\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching complete for ' +", "def handler(signum, frame): print('Signal handler called with signal', signum) sys.exit('Took too long.. 
Goodbye!')", "output.append((article, parse_date(entry.updated))) else: article = '<p><a href=\"' + entry.link + '\" target=\"_blank\">' +", "= ['https://jimit105.medium.com/feed', 'https://jimit105.github.io/pytricks/rss.xml', 'https://scitech105.blogspot.com/rss.xml', 'https://jimit105.github.io/medium-articles/rss.xml', 'https://www.pyimagesearch.com/feed/', 'https://machinelearningmastery.com/feed/', 'https://www.fast.ai/atom.xml', 'https://openai.com/blog/rss/', 'https://research.fb.com/feed/', 'http://googleaiblog.blogspot.com/atom.xml', 'https://blogs.microsoft.com/ai/feed/', 'https://www.analyticsvidhya.com/feed/',", "entry.title + '</a><br/>' + feed.feed.title + \\ ' | ' + \\ convert_timezone(entry.updated_parsed).strftime(", "handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS)) signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged))", "+ str(e)) print('Exception:', str(e)) return output signal.signal(signal.SIGALRM, handler) signal.alarm(300) result = list(map(fetch_feeds, RSS_FEEDS))", "print('Signal handler called with signal', signum) sys.exit('Took too long.. 
Goodbye!') def parse_date(input_date): dt", "merged = list(set(merged)) output = sorted(merged, key=lambda x: x[-1], reverse=True) all_articles = ''", "+ linkedin_badge + '\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) +", "250 def handler(signum, frame): print('Signal handler called with signal', signum) sys.exit('Took too long..", "feed.feed.title + \\ ' | ' + \\ parse_date(entry.updated).strftime( TIME_FMT) + '</p>' output.append((article,", "'<p><a href=\"' + entry.link + '\" target=\"_blank\">' + entry.title + '</a><br/>' + feed.feed.title", "'https://www.analyticsvidhya.com/feed/', 'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum,", "'https://www.hackerearth.com/blog/machine-learning/feed', 'https://mlfromscratch.com/rss/', 'https://www.mygreatlearning.com/blog/category/artificial-intelligence/feed/', 'https://blog.tensorflow.org/atom.xml', 'http://feeds.feedburner.com/kdnuggets-data-mining-analytics', 'https://lazyprogrammer.me/feed/' ] TOP_N = 250 def handler(signum, frame):", "return dt2 def convert_timezone(datetime_struct_time): dt = datetime.fromtimestamp(time.mktime(datetime_struct_time)) dt2 = dt - timedelta(seconds=time.timezone) return", "' | ' + \\ convert_timezone(entry.updated_parsed).strftime( TIME_FMT) + '</p>' output.append( (article, convert_timezone(entry.updated_parsed))) print('Fetching", "] TOP_N = 250 def handler(signum, frame): print('Signal handler called with signal', signum)", "def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return dt2 def", "'[![LinkedIn: 
jimit105](https://img.shields.io/badge/LinkedIn-Jimit%20Dholakia-blue)](https://www.linkedin.com/in/jimit105/)' medium_badge = '[![Medium: jimit105](https://img.shields.io/badge/Medium-jimit105-orange)](https://jimit105.medium.com/)' header = action_badge + '\\n' + maintainer_badge", "signal.alarm(0) merged = list(itertools.chain(*result)) merged = list(set(merged)) output = sorted(merged, key=lambda x: x[-1],", "Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n' complete_text = header + all_articles", "import time import os import itertools import feedparser import urllib.parse import dateutil.parser import", "list(set(merged)) output = sorted(merged, key=lambda x: x[-1], reverse=True) all_articles = '' for article", "'\\n' + medium_badge + '\\n![Last Updated](https://img.shields.io/badge/Last%20Updated%20on-' + \\ urllib.parse.quote(current_time) + '-yellowgreen)' + '\\n\\n'", "Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return dt2", "# -*- coding: utf-8 -*- \"\"\" @author: Jimit.Dholakia \"\"\" from datetime import datetime,", "long.. Goodbye!') def parse_date(input_date): dt = dateutil.parser.parse(input_date) dt2 = dt - timedelta(seconds=time.timezone) return", "datetime import datetime, timedelta import time import os import itertools import feedparser import" ]
[ "django.db import models class Book(models.Model): title = models.CharField(max_length=36, unique=True) description = models.TextField(max_length=256, default=None)", "<reponame>Munduruca/django from django.db import models class Book(models.Model): title = models.CharField(max_length=36, unique=True) description =", "from django.db import models class Book(models.Model): title = models.CharField(max_length=36, unique=True) description = models.TextField(max_length=256," ]
[ "async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\" if", "from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL,", "async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\" if unload_ok :=", "laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id]", "laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from", "coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) ->", "err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator", "import annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries", "hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True", "homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS", "api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry:", "err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL)", 
"await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS)", "\"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant,", "Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import", ".const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def", "async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException as err: raise", "import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS =", "= LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\":", "from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry)", "= async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException as err:", "entry: ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\" if unload_ok := await hass.config_entries.async_unload_platforms(entry,", "LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from", "\"\"\"The laundrify integration.\"\"\" from 
__future__ import annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions", "from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator", "import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:", "raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await", "= LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\")", "config entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except", "api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid", "try: await api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except", "as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot", "api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator, }", "LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const import", "DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator, } 
hass.config_entries.async_setup_platforms(entry,", "{ \"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass:", "err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\":", "CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client", "except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as err:", "ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core", "UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import", ".coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) ->", "from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry", "def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\" if unload_ok", "from err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err", "import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from", "__future__ import annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import 
ApiConnectionException, UnauthorizedException from", "from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN,", "DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry:", "HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up laundrify from a config entry.\"\"\" session", "coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client,", "session) try: await api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err", "from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async", "ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import", "import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions", "coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return", "[Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up laundrify from", "homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady", "\"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry)", "True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\"", "entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException", "ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\")", "LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator,", "= { \"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def", "ConfigEntry) -> bool: \"\"\"Set up laundrify from a config entry.\"\"\" session = async_get_clientsession(hass)", "laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform", "bool: \"\"\"Unload a config entry.\"\"\" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) return", "HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import", "ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator", "bool: \"\"\"Set up laundrify from a config entry.\"\"\" session = async_get_clientsession(hass) api_client =", "annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, 
UnauthorizedException from homeassistant.config_entries import", "hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload", "from __future__ import annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException", "laundrify integration.\"\"\" from __future__ import annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import", "homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from", "def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up laundrify from a config", "\"\"\"Unload a config entry.\"\"\" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) return unload_ok", "from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession", "err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach", "authentication\") from err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from", "homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN", "up laundrify from a config entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session)", "except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator =", "session = async_get_clientsession(hass) api_client = 
LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token() except UnauthorizedException as", "reach laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN,", "ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass,", "import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from", "import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from", "async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR]", "PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up", "return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a config", "from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {", "async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up laundrify from a", "ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):", "ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh()", "DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = 
[Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant,", "-> bool: \"\"\"Set up laundrify from a config entry.\"\"\" session = async_get_clientsession(hass) api_client", "await api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException", "} hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:", "-> bool: \"\"\"Unload a config entry.\"\"\" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id)", "import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const", "api_client.validate_token() except UnauthorizedException as err: raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as", "import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException from homeassistant.config_entries import ConfigEntry from homeassistant.const", "from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant", "import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass:", "PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a", "from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed,", "API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL) await coordinator.async_config_entry_first_refresh() 
hass.data.setdefault(DOMAIN, {})[entry.entry_id] =", "entry: ConfigEntry) -> bool: \"\"\"Set up laundrify from a config entry.\"\"\" session =", "HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Unload a config entry.\"\"\" if unload_ok := await", "LaundrifyUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set", "raise ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as err: raise ConfigEntryNotReady(\"Cannot reach laundrify", "integration.\"\"\" from __future__ import annotations from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException,", "laundrify from a config entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try:", "async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up laundrify from a config entry.\"\"\"", "{})[entry.entry_id] = { \"api\": api_client, \"coordinator\": coordinator, } hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async", "ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import", "homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from", "from a config entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await", "\"\"\"Set up laundrify from a config entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN],", "= [Platform.BINARY_SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: \"\"\"Set up laundrify", "UnauthorizedException as err: raise 
ConfigEntryAuthFailed(\"Invalid authentication\") from err except ApiConnectionException as err: raise", "as err: raise ConfigEntryNotReady(\"Cannot reach laundrify API\") from err coordinator = LaundrifyUpdateCoordinator(hass, api_client,", "a config entry.\"\"\" session = async_get_clientsession(hass) api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session) try: await api_client.validate_token()" ]
[ "= int(input()) cards = list(input().split()) deck = [] if len(cards) % 2 ==", "range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid = len(cards) // 2 for i", "test_cases + 1): n = int(input()) cards = list(input().split()) deck = [] if", "// 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid =", "<gh_stars>1-10 test_cases = int(input()) for t in range(1, test_cases + 1): n =", "deck.append(cards[i]) deck.append(cards[i + mid]) else: mid = len(cards) // 2 for i in", "i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid + 1]) deck.append(cards[mid]) print('#{} {}'.format(t, '", "deck.append(cards[i + mid]) else: mid = len(cards) // 2 for i in range(mid):", "[] if len(cards) % 2 == 0: mid = len(cards) // 2 for", "in range(mid): deck.append(cards[i]) deck.append(cards[i + mid + 1]) deck.append(cards[mid]) print('#{} {}'.format(t, ' '.join(deck)))", "1): n = int(input()) cards = list(input().split()) deck = [] if len(cards) %", "mid = len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid", "if len(cards) % 2 == 0: mid = len(cards) // 2 for i", "+ mid]) else: mid = len(cards) // 2 for i in range(mid): deck.append(cards[i])", "= list(input().split()) deck = [] if len(cards) % 2 == 0: mid =", "else: mid = len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i +", "= len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid +", "2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid + 1]) deck.append(cards[mid]) print('#{}", "2 == 0: mid = len(cards) // 2 for i in range(mid): deck.append(cards[i])", "for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid = len(cards) //", "i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid = len(cards) // 2", "n = int(input()) cards = list(input().split()) deck = [] if len(cards) % 2", "0: mid = len(cards) // 2 for i in 
range(mid): deck.append(cards[i]) deck.append(cards[i +", "int(input()) cards = list(input().split()) deck = [] if len(cards) % 2 == 0:", "= int(input()) for t in range(1, test_cases + 1): n = int(input()) cards", "mid = len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid])", "// 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid + 1]) deck.append(cards[mid])", "int(input()) for t in range(1, test_cases + 1): n = int(input()) cards =", "cards = list(input().split()) deck = [] if len(cards) % 2 == 0: mid", "in range(1, test_cases + 1): n = int(input()) cards = list(input().split()) deck =", "= [] if len(cards) % 2 == 0: mid = len(cards) // 2", "in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid = len(cards) // 2 for", "mid]) else: mid = len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i", "len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid", "test_cases = int(input()) for t in range(1, test_cases + 1): n = int(input())", "len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid + 1])", "list(input().split()) deck = [] if len(cards) % 2 == 0: mid = len(cards)", "= len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else:", "== 0: mid = len(cards) // 2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i", "for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid + 1]) deck.append(cards[mid]) print('#{} {}'.format(t,", "t in range(1, test_cases + 1): n = int(input()) cards = list(input().split()) deck", "len(cards) % 2 == 0: mid = len(cards) // 2 for i in", "2 for i in range(mid): deck.append(cards[i]) deck.append(cards[i + mid]) else: mid = len(cards)", "% 2 == 0: mid = len(cards) // 2 for i in range(mid):", "for t in range(1, test_cases + 1): n = int(input()) cards = list(input().split())", "+ 1): n = int(input()) cards = 
list(input().split()) deck = [] if len(cards)", "range(1, test_cases + 1): n = int(input()) cards = list(input().split()) deck = []", "deck = [] if len(cards) % 2 == 0: mid = len(cards) //" ]
[ "# Copyright 2017 <NAME> # # Licensed under the MIT License. If the", "If the LICENSE file is missing, you # can find the MIT license", "attach routes and custom error pages here # from .main import main as", "create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom error pages", "from config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach", "app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom error pages here # from .main", "Copyright 2017 <NAME> # # Licensed under the MIT License. If the LICENSE", "import Flask, render_template from config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name])", "Flask, render_template from config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app)", "terms here: https://opensource.org/licenses/MIT from flask import Flask, render_template from config import config def", "LICENSE file is missing, you # can find the MIT license terms here:", "app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom error pages here", "can find the MIT license terms here: https://opensource.org/licenses/MIT from flask import Flask, render_template", "routes and custom error pages here # from .main import main as main_blueprint", "# Licensed under the MIT License. 
If the LICENSE file is missing, you", "from flask import Flask, render_template from config import config def create_app(config_name): app =", "import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and", "and custom error pages here # from .main import main as main_blueprint app.register_blueprint(main_blueprint)", "MIT License. If the LICENSE file is missing, you # can find the", "find the MIT license terms here: https://opensource.org/licenses/MIT from flask import Flask, render_template from", "here: https://opensource.org/licenses/MIT from flask import Flask, render_template from config import config def create_app(config_name):", "= Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom error pages here #", "is missing, you # can find the MIT license terms here: https://opensource.org/licenses/MIT from", "MIT license terms here: https://opensource.org/licenses/MIT from flask import Flask, render_template from config import", "def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom error", "you # can find the MIT license terms here: https://opensource.org/licenses/MIT from flask import", "license terms here: https://opensource.org/licenses/MIT from flask import Flask, render_template from config import config", "the MIT License. If the LICENSE file is missing, you # can find", "# # Licensed under the MIT License. If the LICENSE file is missing,", "error pages here # from .main import main as main_blueprint app.register_blueprint(main_blueprint) return app", "under the MIT License. 
If the LICENSE file is missing, you # can", "missing, you # can find the MIT license terms here: https://opensource.org/licenses/MIT from flask", "the MIT license terms here: https://opensource.org/licenses/MIT from flask import Flask, render_template from config", "License. If the LICENSE file is missing, you # can find the MIT", "custom error pages here # from .main import main as main_blueprint app.register_blueprint(main_blueprint) return", "<NAME> # # Licensed under the MIT License. If the LICENSE file is", "# can find the MIT license terms here: https://opensource.org/licenses/MIT from flask import Flask,", "config[config_name].init_app(app) # attach routes and custom error pages here # from .main import", "flask import Flask, render_template from config import config def create_app(config_name): app = Flask(__name__)", "config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes", "# attach routes and custom error pages here # from .main import main", "file is missing, you # can find the MIT license terms here: https://opensource.org/licenses/MIT", "Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom error pages here # from", "render_template from config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) #", "config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) # attach routes and custom", "Licensed under the MIT License. If the LICENSE file is missing, you #", "https://opensource.org/licenses/MIT from flask import Flask, render_template from config import config def create_app(config_name): app", "the LICENSE file is missing, you # can find the MIT license terms", "2017 <NAME> # # Licensed under the MIT License. 
If the LICENSE file" ]
[ ">= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool only_once: bool #", "reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, positive_weight: float = 1.,", "self.n class CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium,", "Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec or you", "np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward", "return np.array([ # Extra -1 is included to avoid taking max of empty", "self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward = np.where( reward > 0.,", "count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff", "= 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start with 1", "below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for defining a reward space for", "# Subtask reward spaces defined below # NB: Subtasks that are \"different enough\"", "goal_reached.any() or done @abstractmethod def completed_task(self, game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self,", "@abstractmethod def completed_task(self, game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) ->", "np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"],", "tiles with unit count as a tie-breaking mechanism rewards = 
[int(GameResultReward.compute_player_reward(p)) for p", "sum([unit.cargo.coal for unit in player.units]) for player in game_state.players ]) >= self.n class", "Dict[str, np.ndarray]: return {} # Full game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace):", "from ..lux.game_objects import Player def count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count for player", "def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state:", "spaces defined below # NB: Subtasks that are \"different enough\" should be defined", "\"unit\": new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points, # Don't penalize losing fuel", "done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def", "\"city\": 1., \"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val for key,", "bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop: done = done or should_early_stop(game_state) return", "self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\": 1.,", "rewards = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards = (rankdata(rewards) - 1.) 
*", "10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0.,", "= np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1,", "super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "defined below # NB: Subtasks that are \"different enough\" should be defined separately", "np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec(", "should be defined separately since each subtask gets its own embedding # See", "return np.array([ sum([unit.cargo.uranium for unit in player.units]) for player in game_state.players ]) >=", "Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel =", "n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in", "GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True )", "done: return 0., 0. 
# reward here is defined as the sum of", "size is 32 x 32 => 1024 max city tiles and units, #", "def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def", "negative_weight: float = 1., early_stop: bool = False, **kwargs ): assert positive_weight >", "def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: pass def get_info(self)", "| (unit_diff < 0) reward = np.where( lost_unit_or_city, -0.1, reward / 1_000. )", "n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def", "self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel,", "def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n =", "logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod", "return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray: return", "failed_task, 0., 0.5 + 0.5 * completed_task.astype(float) ) else: rewards = completed_task.astype(float) done", "city tiles and units, # so this should keep it strictly so we", "Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state: Game) ->", "reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w in self.weights.items()], axis=0", "spaces defined below class FullGameRewardSpace(BaseRewardSpace): 
\"\"\" A class used for defining a reward", "done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return", "def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward = np.where( reward > 0., self.positive_weight", "reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "np.ndarray: return np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count", "self.early_stop: done = done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state:", "= (rankdata(rewards) - 1.) * 2. - 1. return tuple(rewards) @staticmethod def compute_player_reward(player:", "game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in player.units]) for player", "= 1., early_stop: bool = False, **kwargs ): assert positive_weight > 0. assert", "= GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state: Game)", "GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count", "# See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations for subtasks? 
class Subtask(BaseRewardSpace,", "for city in player.cities.values()]) for player in game_state.players ]) def count_research_points(game_state: Game) ->", "{kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game,", "= np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 0.,", "done), done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: new_city_count =", "from ..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player def", "RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self,", "1., early_stop: bool = False, **kwargs ): assert positive_weight > 0. assert negative_weight", "bool) -> Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points =", "count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count", "get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state:", "< 0) reward = np.where( lost_unit_or_city, -0.1, reward / 1_000. 
) return tuple(reward),", "-> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in player.units]) for player in game_state.players", "= np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w in self.weights.items()], axis=0 ).sum(axis=0) return", "risk breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True )", "= { \"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, #", "key in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key]", "1., negative_weight: float = 1., early_stop: bool = False, **kwargs ): assert positive_weight", "player.city_tile_count unit_count = len(player.units) # max board size is 32 x 32 =>", "reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs)", "separately since each subtask gets its own embedding # See obs_spaces.SUBTASK_ENCODING # TODO:", "2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start with 1 city", "return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float,", "float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self,", "done: bool) -> Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points", "self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count)", "* GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel 
def completed_task(self, game_state: Game) ->", "): assert positive_weight > 0. assert negative_weight > 0. self.positive_weight = positive_weight self.negative_weight", "kwargs.items() if key in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys():", "reward = np.where( reward > 0., self.positive_weight * reward, reward ) reward =", "self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "done = failed_task.any() or completed_task.any() or done if done: self._reset() return tuple(rewards), done", "- self.unit_count, \"research\": new_research_points - self.research_points, # Don't penalize losing fuel at night", "sum of number of city tiles with unit count as a tie-breaking mechanism", "of city tiles with unit count as a tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p))", "only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state)", "from scipy.stats import rankdata from typing import Dict, NamedTuple, NoReturn, Tuple from ..lux.game", "game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel for city in player.cities.values()])", "reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for defining a", "= new_unit_count return failed def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count =", "np.array([ # Extra -1 is included to avoid taking max of empty sequence", "player.cities.values()]) for player in game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points", "np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for", "positive_weight: 
float = 1., negative_weight: float = 1., early_stop: bool = False, **kwargs", "-> np.ndarray: return np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray:", "game_state: Game, done: bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec()", "= np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\": 1., \"unit\":", "losing fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() >", "game_state: Game, done: bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return", "* reward, reward ) return reward def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count)", "keep it strictly so we break by city tiles then unit count return", "# Full game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used", "step that their cargo remains full # \"full_workers\": -0.01, \"full_workers\": 0., # A", "new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count - self.city_count,", "def completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players]) >=", "city in player.cities.values()] + [0]) for player in game_state.players ]) >= self.n_city_tiles class", "/ max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(), 1) return ((ct_count == 0).any()", "**kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return", "np.ndarray: return np.array([ sum([city.fuel for city in player.cities.values()]) for player in game_state.players ])", "embedding # See 
obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations for subtasks? class", "\"full_workers\": 0., # A reward given each step \"step\": 0., } self.weights.update({key: val", "sum([unit.cargo.wood for unit in player.units]) for player in game_state.players ]) >= self.n class", "= count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff =", "def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count = len(player.units) # max board size", "return reward def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points", "def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n =", "space and/or done state for either the full game or a sub-task \"\"\"", "unit in player.units]) for player in game_state.players ]) >= self.n class CollectNCoal(Subtask): def", "new_unit_count < self.unit_count ) self.city_count = new_city_count self.unit_count = new_unit_count return failed def", "A reward given each step \"step\": 0., } self.weights.update({key: val for key, val", "= ct_count / max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(), 1) return ((ct_count", "= GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"]", "reward_items_dict = { \"city\": new_city_count - self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points", "self.n_city_tiles = n_city_tiles def completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles", "game_state: Game, done: bool) -> 
Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count =", "obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations for subtasks? class Subtask(BaseRewardSpace, ABC): @staticmethod", "2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray:", "__init__(self, early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self,", "len(player.units) # max board size is 32 x 32 => 1024 max city", "key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def", "or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done: bool) ->", "\"city\": new_city_count - self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points, #", "-> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) ->", "self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class", "night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit", "= np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces defined", "positive_weight self.negative_weight = negative_weight self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count =", "= new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] 
= game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward", "in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward,", "\"Players start with 1 city tile already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state:", "MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles >", "\"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize workers", "each step \"step\": 0., } self.weights.update({key: val for key, val in kwargs.items() if", "avoid taking max of empty sequence max([len(city.citytiles) for city in player.cities.values()] + [0])", "target locations for subtasks? class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\"", "+ 0.5 * completed_task.astype(float) ) else: rewards = completed_task.astype(float) done = failed_task.any() or", "reward, reward ) reward = np.where( reward < 0., self.negative_weight * reward, reward", "Game, done: bool) -> Tuple[float, float]: if not done: return 0., 0. #", "game_state: Game, done: bool) -> Tuple[float, float]: if not done: return 0., 0.", "= game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for", "class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs)", "completed_task.astype(float) ) else: rewards = completed_task.astype(float) done = failed_task.any() or completed_task.any() or done", "0. 
# reward here is defined as the sum of number of city", "done if done: self._reset() return tuple(rewards), done def completed_task(self, game_state: Game) -> np.ndarray:", "new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff = new_unit_count - self.unit_count", "for unit in player.units]) for player in game_state.players ]) >= self.n class MakeNCityTiles(Subtask):", "= n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for unit", "class CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs)", "subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n", "np.ndarray: return np.array([ sum([unit.cargo.wood for unit in player.units]) for player in game_state.players ])", "BaseRewardSpace(ABC): \"\"\" A class used for defining a reward space and/or done state", "self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points for", "= np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: failed_task", "bool) -> Tuple[float, float]: if not done: return 0., 0. # reward here", "float]: if not done: return 0., 0. 
# reward here is defined as", "in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel for city in", "# \"full_workers\": -0.01, \"full_workers\": 0., # A reward given each step \"step\": 0.,", "max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(), 1) return ((ct_count == 0).any() or", "def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state)", "reward def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points =", "- 1. return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count =", "self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop:", "bool) -> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any():", "0., 0.5 + 0.5 * completed_task.astype(float) ) else: rewards = completed_task.astype(float) done =", "spaces defined below class BaseRewardSpace(ABC): \"\"\" A class used for defining a reward", "compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count", "Player): ct_count = player.city_tile_count unit_count = len(player.units) # max board size is 32", "2. - 1. self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count = new_city_count self.unit_count", "= np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces defined below # NB:", "negative_weight > 0. 
self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop = early_stop self.city_count", "return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game, done: bool)", "unit in player.units]) for player in game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def", "game_state: Game) -> np.ndarray: return np.array([ # Extra -1 is included to avoid", "for key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0) |", ") else: rewards = completed_task.astype(float) done = failed_task.any() or completed_task.any() or done if", "**kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "**kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() ->", "reward < 0., self.negative_weight * reward, reward ) return reward def _reset(self) ->", "/ 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. /", "unit.is_worker()) for player in game_state.players ]), \"step\": np.ones(2, dtype=float) } if done: game_result_reward", "axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0) | (unit_diff < 0) reward =", "[int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards = (rankdata(rewards) - 1.) * 2. 
-", "self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: if", "bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state)", "np.ones(2, dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward", "early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state:", "= game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w for", "in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500. / max(self.positive_weight, self.negative_weight)) def weight_rewards(self,", "\"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state,", "if done: self._reset() return tuple(rewards), done def completed_task(self, game_state: Game) -> np.ndarray: return", "unit_count / max(unit_count.sum(), 1) return ((ct_count == 0).any() or (unit_count == 0).any() or", "} if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward = (rankdata(game_result_reward)", "(rankdata(game_result_reward) - 1.) * 2. - 1. 
self._reset() else: game_result_reward = np.array([0., 0.])", "game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] *", "in game_state.players ]), \"step\": np.ones(2, dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for", "\"fuel\": 0.001, } self.weights.update({key: val for key, val in kwargs.items() if key in", "taking max of empty sequence max([len(city.citytiles) for city in player.cities.values()] + [0]) for", "> 0. assert negative_weight > 0. self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop", "self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "failed_task.any(): rewards = np.where( failed_task, 0., 0.5 + 0.5 * completed_task.astype(float) ) else:", "class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec or", "* 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec(", "only_once: bool # All reward spaces defined below class BaseRewardSpace(ABC): \"\"\" A class", "@staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done:", "def completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def", "max(unit_count.sum(), 1) return ((ct_count == 0).any() or (unit_count == 0).any() or (ct_pct >=", "Tuple[Tuple[float, float], bool]: pass def get_info(self) -> Dict[str, np.ndarray]: return {} # Full", "self.city_count = new_city_count self.unit_count = new_unit_count self.research_points = new_research_points self.total_fuel = new_total_fuel 
reward_items_dict[\"game_result\"]", "max([len(city.citytiles) for city in player.cities.values()] + [0]) for player in game_state.players ]) >=", "def should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct =", "float = 1., negative_weight: float = 1., early_stop: bool = False, **kwargs ):", "done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or", "\"\"\" Don't override reward_spec or you risk breaking classes like multi_subtask.MultiSubtask \"\"\" return", "FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for defining a reward space for the full", "done: bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self,", "val in kwargs.items() if key in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key", "Game) -> bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count /", "or completed_task.any() or done if done: self._reset() return tuple(rewards), done def completed_task(self, game_state:", "game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for unit in player.units]) for player", "either the full game or a sub-task \"\"\" def __init__(self, **kwargs): if kwargs:", "if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward = (rankdata(game_result_reward) -", "CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n", "Game, done: bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points", "if not done: return 0., 0. 
# reward here is defined as the", "def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for unit in player.units])", "/ max(unit_count.sum(), 1) return ((ct_count == 0).any() or (unit_count == 0).any() or (ct_pct", "for player in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int", "new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] *", "else: game_result_reward = np.array([0., 0.]) self.city_count = new_city_count self.unit_count = new_unit_count self.research_points =", "as a tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards =", "/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, positive_weight: float = 1., negative_weight:", "0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize workers each step that their cargo", "workers each step that their cargo remains full # \"full_workers\": -0.01, \"full_workers\": 0.,", "completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for unit in player.units]) for", "self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for", "0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit in player.units if unit.is_worker()) for", "self.unit_count = new_unit_count self.research_points = new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert", "copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state:", "new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward 
= [int(GameResultReward.compute_player_reward(p)) for p in game_state.players]", "def __init__( self, positive_weight: float = 1., negative_weight: float = 1., early_stop: bool", "< self.unit_count ) self.city_count = new_city_count self.unit_count = new_unit_count return failed def _reset(self)", "reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if", "self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "completed_task(self, game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) -> int: return", "-0.01, \"full_workers\": 0., # A reward given each step \"step\": 0., } self.weights.update({key:", "self.unit_count = new_unit_count return failed def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count", "bool]: return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state: Game, done: bool) ->", "max of empty sequence max([len(city.citytiles) for city in player.cities.values()] + [0]) for player", "== reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w for key, w in self.weights.items()],", "else: rewards = completed_task.astype(float) done = failed_task.any() or completed_task.any() or done if done:", "def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False,", "player in game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int =", "count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state: Game)", "(rankdata(rewards) - 1.) * 2. - 1. 
return tuple(rewards) @staticmethod def compute_player_reward(player: Player):", "-> bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count / max(ct_count.sum(),", "self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self, game_state: Game) -> np.ndarray:", "False, **kwargs ): assert positive_weight > 0. assert negative_weight > 0. self.positive_weight =", "count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(), 1)", "reward spaces defined below # NB: Subtasks that are \"different enough\" should be", "or (unit_count == 0).any() or (ct_pct >= 0.75).any() or (unit_pct >= 0.75).any()) class", "new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count - self.city_count, \"unit\": new_unit_count -", "player in game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state) unit_count =", "self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal", "compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: raise NotImplementedError def _reset(self) ->", "import copy import logging import numpy as np from scipy.stats import rankdata from", "= self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self, game_state: Game) ->", "0., self.positive_weight * reward, reward ) reward = np.where( reward < 0., self.negative_weight", "def __init__( self, **kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points", "**kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray: return", "class 
CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n =", "np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\":", "in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self,", "self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]:", "CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel", "GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game)", "np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel", "or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: raise NotImplementedError", "raise NotImplementedError def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points", "NotImplementedError def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points =", "Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state: Game, done:", "/ 500. 
/ max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward =", "rewards = completed_task.astype(float) done = failed_task.any() or completed_task.any() or done if done: self._reset()", "self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0) | (unit_diff < 0) reward", "# A reward given each step \"step\": 0., } self.weights.update({key: val for key,", "new_city_count - self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict = { \"city\": new_city_count,", "board size is 32 x 32 => 1024 max city tiles and units,", "done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: if not done:", ">= 0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum:", "a tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards = (rankdata(rewards)", "]) >= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles,", "n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count =", "compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return", "if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game,", "break by city tiles then unit count return ct_count * 10000 + unit_count", ").sum(axis=0) return tuple(reward / 500. 
/ max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) ->", "// 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) ->", "w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500. / max(self.positive_weight, self.negative_weight)) def", "-> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() ->", "count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count", "self.positive_weight * reward, reward ) reward = np.where( reward < 0., self.negative_weight *", "__init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self,", "= np.stack( [reward_items_dict[key] * w for key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city", "for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset()", "locations for subtasks? 
class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't", "-> Tuple[Tuple[float, float], bool]: pass def get_info(self) -> Dict[str, np.ndarray]: return {} #", "classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self,", "compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count =", "abstractmethod import copy import logging import numpy as np from scipy.stats import rankdata", "def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count", "should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float,", "* w for key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff <", "compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done))", "self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task, 0., 0.5 + 0.5 * completed_task.astype(float)", "a reward space and/or done state for either the full game or a", "for unit in player.units]) for player in game_state.players ]) >= self.n class CollectNUranium(Subtask):", "ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec or you risk", "-> np.ndarray: reward = np.where( reward > 0., self.positive_weight * reward, reward )", "bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task,", "self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = 
np.empty_like(self.city_count)", "game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct", "as the sum of number of city tiles with unit count as a", "numpy as np from scipy.stats import rankdata from typing import Dict, NamedTuple, NoReturn,", "# All reward spaces defined below class BaseRewardSpace(ABC): \"\"\" A class used for", "tiles and units, # so this should keep it strictly so we break", "pass def get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self,", "{ \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward", "= n_research_points def completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points for player in", "tile already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state)", "self.n class CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal,", "player in game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for player", "2. - 1. 
return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count", "n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for unit in", "count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray: return np.array([", ">= self.target_step ]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count", "= np.where( reward < 0., self.negative_weight * reward, reward ) return reward def", "that are \"different enough\" should be defined separately since each subtask gets its", "in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self,", "* cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count)", "np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() ->", "self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs", "n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask):", "RewardSpec: \"\"\" Don't override reward_spec or you risk breaking classes like multi_subtask.MultiSubtask \"\"\"", "count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict =", "unit_pct = unit_count / 
max(unit_count.sum(), 1) return ((ct_count == 0).any() or (unit_count ==", "= np.array([0., 0.]) self.city_count = new_city_count self.unit_count = new_unit_count self.research_points = new_research_points self.total_fuel", "return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]:", "10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize workers each", "rewards = (rankdata(rewards) - 1.) * 2. - 1. return tuple(rewards) @staticmethod def", "self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs)", "\"research\": 0.1, \"fuel\": 0.005, # Penalize workers each step that their cargo remains", "..lux.game_objects import Player def count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count for player in", "/ 1_000. ) return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done:", "assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w for key, w", "in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"]", "float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state: Game, done: bool)", "np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces defined below", "ABC, abstractmethod import copy import logging import numpy as np from scipy.stats import", "its own embedding # See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations for", "1.) * 2. - 1. 
self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count =", "del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "# Extra -1 is included to avoid taking max of empty sequence max([len(city.citytiles)", "class used for defining a reward space and/or done state for either the", "in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0) | (unit_diff < 0)", ">= self.n class CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs):", "self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\":", "self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood", "def __init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1,", "-> np.ndarray: return np.array([ sum([unit.cargo.coal for unit in player.units]) for player in game_state.players", "- self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points, # Don't penalize", "- 1. 
self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count = new_city_count self.unit_count =", "= new_city_count self.unit_count = new_unit_count return failed def _reset(self) -> NoReturn: self.city_count =", "new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict", "Subtask reward spaces defined below # NB: Subtasks that are \"different enough\" should", "NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel)", "float], bool]: pass def get_info(self) -> Dict[str, np.ndarray]: return {} # Full game", "step \"step\": 0., } self.weights.update({key: val for key, val in kwargs.items() if key", "# Don't penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\":", "for player in game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for", "weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward = np.where( reward > 0., self.positive_weight *", "like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state:", "np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state)", "np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state)", "= 
np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return", "= count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count < self.city_count, new_unit_count <", "new_unit_count self.research_points = new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() ==", "player in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int =", "game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop: done = done", "\"research\": new_research_points - self.research_points, # Don't penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel", "> 0. self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop = early_stop self.city_count =", "\"\"\" A class used for defining a reward space for the full game.", "super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self,", "-> np.ndarray: return np.array([ sum([city.fuel for city in player.cities.values()]) for player in game_state.players", "game_state.players ]) >= self.n class CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] //", "np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: failed_task =", "self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs 
): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def", "city tiles with unit count as a tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for", ">= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ #", "Tuple from ..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player", "get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False", "Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for unit in player.units]) for player in", "Dict, NamedTuple, NoReturn, Tuple from ..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS from", "import Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state: Game)", "subtasks? class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec", "done = done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game,", "Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game)", "int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step =", "GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count for", "* 2. - 1. 
self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count = new_city_count", "rankdata from typing import Dict, NamedTuple, NoReturn, Tuple from ..lux.game import Game from", "self.target_step ]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count =", "new_unit_count - self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\":", "GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state: Game) ->", "-> Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state)", "self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: new_city_count", "for defining a reward space for the full game. 
\"\"\" def compute_rewards_and_done(self, game_state:", "if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game,", "5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray:", "unit count as a tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players]", "\"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit in player.units if unit.is_worker()) for player", "compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: pass def get_info(self) ->", "super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([", "-> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards", "-> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask):", "reward ) return reward def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count =", "done @abstractmethod def completed_task(self, game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict)", "\"different enough\" should be defined separately since each subtask gets its own embedding", "game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players])", "class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False", "class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> 
RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True", "count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state: Game)", "p in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) * 2. - 1. self._reset()", "Tuple[float, float]: if not done: return 0., 0. # reward here is defined", "at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for", "= n_city_tiles def completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class", "self.weights = { \"game_result\": 0., \"city\": 1., \"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001,", "zero_sum=True, only_once=True ) def __init__(self, early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop", "then unit count return ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def", "return ((ct_count == 0).any() or (unit_count == 0).any() or (ct_pct >= 0.75).any() or", "player in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel for city", "zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "player in game_state.players]) def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for player in", "completed_task.astype(float) done = failed_task.any() or completed_task.any() or done if done: self._reset() return tuple(rewards),", "= count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff = new_unit_count", "# reward here is defined as the sum of number of city tiles", "{ \"city\": new_city_count - 
self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points,", "in game_state.players ]) >= self.n class CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"]", "return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count = len(player.units) #", "def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.)", "**kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len", "= np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1.,", "NB: Subtasks that are \"different enough\" should be defined separately since each subtask", "for player in game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state) unit_count", "bool) -> Tuple[Tuple[float, float], bool]: pass def get_info(self) -> Dict[str, np.ndarray]: return {}", "float], bool]: if self.early_stop: done = done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done", "reward = np.stack( [reward_items_dict[key] * w for key, w in self.weights.items()], axis=0 ).sum(axis=0)", "you risk breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True", "= new_city_count - self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict = { \"city\":", "\"fuel\": 0.005, # Penalize workers each step that their cargo remains full #", "self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500. 
/ max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward:", "= [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards = (rankdata(rewards) - 1.) * 2.", "GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, positive_weight: float = 1., negative_weight: float", "reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, **kwargs ): self.city_count =", "- 1.) * 2. - 1. self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count", "failed = np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count ) self.city_count = new_city_count", ") def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward,", "new_unit_count return failed def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count)", "{ \"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize", "in game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state)", "= np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask", "since each subtask gets its own embedding # See obs_spaces.SUBTASK_ENCODING # TODO: Somehow", "Game, done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) 
class StatefulMultiReward(FullGameRewardSpace): @staticmethod", "received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod def", "__init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel", "unit_diff = new_unit_count - self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count, \"research\":", "zero_sum=False, only_once=False ) def __init__( self, positive_weight: float = 1., negative_weight: float =", "- self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel,", "new_city_count < self.city_count, new_unit_count < self.unit_count ) self.city_count = new_city_count self.unit_count = new_unit_count", "int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self,", "units, # so this should keep it strictly so we break by city", "for player in game_state.players ]) >= self.n class CollectNUranium(Subtask): def __init__(self, n: int", "def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ # Extra -1 is included", "class SurviveNNights(Subtask): def __init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len =", "@staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec or you risk breaking", "player.units]) for player in game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles:", "> 0 for unit in player.units if unit.is_worker()) for player in game_state.players ]),", "np.ndarray: return 
np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return", "= n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for unit", "0., 0. # reward here is defined as the sum of number of", "return {} # Full game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A", "n_city_tiles def completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles):", "class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs)", "space for the full game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "np.ndarray: reward = np.where( reward > 0., self.positive_weight * reward, reward ) reward", "np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit in player.units", "self.n_total_fuel = n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel", "= GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) ->", "GetNResearchPoints(Subtask): def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points", "unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False,", "Player def count_city_tiles(game_state: Game) -> np.ndarray: return 
np.array([player.city_tile_count for player in game_state.players]) def", "= failed_task.any() or completed_task.any() or done if done: self._reset() return tuple(rewards), done def", "kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state:", "given each step \"step\": 0., } self.weights.update({key: val for key, val in kwargs.items()", "reward_spec or you risk breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1.,", "@staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count = len(player.units) # max board", "reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]:", "0. assert negative_weight > 0. self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop =", "GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self, game_state: Game)", "for player in game_state.players ]), \"step\": np.ones(2, dtype=float) } if done: game_result_reward =", "int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game)", "game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state)", "reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def", "{} # Full game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class", "# so this should keep it 
strictly so we break by city tiles", "np.array([ sum([unit.cargo.uranium for unit in player.units]) for player in game_state.players ]) >= self.n", "CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n", "\"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward =", "start with 1 city tile already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state: Game)", "each subtask gets its own embedding # See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include", "be defined separately since each subtask gets its own embedding # See obs_spaces.SUBTASK_ENCODING", "self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for", "import GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count", "= early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel", "are \"different enough\" should be defined separately since each subtask gets its own", "new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p", "self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) #", "should_early_stop(game_state: Game) -> bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count", "is 32 x 32 => 1024 max city tiles and units, # so", "done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop: done = done or 
should_early_stop(game_state)", "= np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count ) self.city_count = new_city_count self.unit_count", "logging import numpy as np from scipy.stats import rankdata from typing import Dict,", "count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights,", "def completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def", "super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points", "or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool only_once:", "or (ct_pct >= 0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max:", "done: bool) -> Tuple[float, float]: if not done: return 0., 0. 
# reward", "count_units(game_state) failed = np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count ) self.city_count =", "Game) -> np.ndarray: return np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state: Game) ->", "def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel =", "n_city_tiles > 1, \"Players start with 1 city tile already\" self.n_city_tiles = n_city_tiles", "np.where( reward < 0., self.negative_weight * reward, reward ) return reward def _reset(self)", "w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0) | (unit_diff <", "super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert", "compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count = len(player.units) # max board size is", "= np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward):", "included to avoid taking max of empty sequence max([len(city.citytiles) for city in player.cities.values()]", "= self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task, 0., 0.5", "reward space and/or done state for either the full game or a sub-task", ">= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs)", "> 0., self.positive_weight * reward, reward ) reward = np.where( reward < 0.,", "{ \"game_result\": 0., \"city\": 1., 
\"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key:", "**kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return", "in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward,", "Game, done: bool) -> Tuple[float, float]: raise NotImplementedError def _reset(self) -> NoReturn: self.city_count", "only_once=False ) def __init__( self, **kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count =", "np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray:", "RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop: bool = False, **kwargs):", "\"research\": new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in", "def completed_task(self, game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) -> int:", "key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0) | (unit_diff", "failed_task.any() or completed_task.any() or done if done: self._reset() return tuple(rewards), done def completed_task(self,", "np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\":", "RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "done), done @abstractmethod def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: pass", "* reward, reward ) reward = np.where( reward < 0., self.negative_weight * reward,", "return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game, 
done: bool)", "with 1 city tile already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state: Game) ->", "= len(player.units) # max board size is 32 x 32 => 1024 max", "player.units]) for player in game_state.players ]) >= self.n class CollectNUranium(Subtask): def __init__(self, n:", "breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def", "below class BaseRewardSpace(ABC): \"\"\" A class used for defining a reward space and/or", "< 0., self.negative_weight * reward, reward ) return reward def _reset(self) -> NoReturn:", "< self.city_count, new_unit_count < self.unit_count ) self.city_count = new_city_count self.unit_count = new_unit_count return", "lost_unit_or_city = (city_diff < 0) | (unit_diff < 0) reward = np.where( lost_unit_or_city,", "return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int = 1, **kwargs):", "0 for unit in player.units if unit.is_worker()) for player in game_state.players ]), \"step\":", "cargo remains full # \"full_workers\": -0.01, \"full_workers\": 0., # A reward given each", "return tuple(rewards), done def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ game_state.turn >=", "\"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val for key, val in", "1. return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count = len(player.units)", "multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game,", "used for defining a reward space for the full game. 
\"\"\" def compute_rewards_and_done(self,", "-> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel =", "game or a sub-task \"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected", "bool = False, **kwargs ): assert positive_weight > 0. assert negative_weight > 0.", "\"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit in", "= new_unit_count self.research_points = new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys()", "np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights =", "done), done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: if not", "-> np.ndarray: return np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state: Game) -> bool:", "so we break by city tiles then unit count return ct_count * 10000", "StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/", "self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\": 1., \"unit\": 0.5, \"research\":", "\"\"\" A class used for defining a reward space and/or done state for", "Game, done: bool) -> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state)", "]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"],", "..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state:", "subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int =", "return np.array([len(player.units) for player in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([", "only_once=False ) def __init__( self, positive_weight: float = 1., negative_weight: float = 1.,", "self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len", "copy import logging import numpy as np from scipy.stats import rankdata from typing", "for key, val in kwargs.items() if key in self.weights.keys()}) for key in copy.copy(kwargs).keys():", "for player in game_state.players]) def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for player", "-> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False )", "np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() ->", "RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs)", "player.cities.values()] + [0]) for player in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def", "only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: reward =", "self._reset() return tuple(rewards), done def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ game_state.turn", "__init__( self, **kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points =", "bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state:", "player in game_state.players ]), \"step\": np.ones(2, dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p))", "self.research_points, # Don't penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0),", "game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)),", "np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def", "new_city_count = count_city_tiles(game_state) new_unit_count = 
count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff", "= GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self, game_state:", "0).any() or (ct_pct >= 0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float", "class BaseRewardSpace(ABC): \"\"\" A class used for defining a reward space and/or done", "@staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def", "self.research_points = new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys()", "def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def", "pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: pass", "early_stop def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop:", "game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w for key,", "<= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game,", "done: bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec:", "reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"],", "0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool", "int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood,", "bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean())", "Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace):", "tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards = (rankdata(rewards) -", "= np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\": 1., \"unit\":", "np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\": 1., \"unit\": 0.5, \"research\": 0.01, \"fuel\":", "0.01, \"fuel\": 0.001, } self.weights.update({key: val for key, val in kwargs.items() if key", "self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: new_city_count", "-> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel", "Full game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for", "1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize workers each step that", "RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game, done:", "new_unit_count = count_units(game_state) 
new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count -", "-> Tuple[Tuple[float, float], bool]: if self.early_stop: done = done or should_early_stop(game_state) return self.compute_rewards(game_state,", "def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for unit in player.units])", "self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w for key, w in", "new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p))", "return 0., 0. # reward here is defined as the sum of number", "tuple(rewards), done def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ game_state.turn >= self.target_step", "class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1.", "= np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec()", "import ABC, abstractmethod import copy import logging import numpy as np from scipy.stats", "**kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return", "defined separately since each subtask gets its own embedding # See obs_spaces.SUBTASK_ENCODING #", "Game) -> np.ndarray: return np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state: Game) ->", "game_result_reward = (rankdata(game_result_reward) - 1.) * 2. - 1. 
self._reset() else: game_result_reward =", "cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def", "__init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"]", "fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0", "to avoid taking max of empty sequence max([len(city.citytiles) for city in player.cities.values()] +", "// 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) ->", "np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in player.units]) for player in game_state.players ])", "game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights:", "Tuple[Tuple[float, float], bool]: if self.early_stop: done = done or should_early_stop(game_state) return self.compute_rewards(game_state, done),", "[reward_items_dict[key] * w for key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff", "np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\":", "Penalize workers each step that their cargo remains full # \"full_workers\": -0.01, \"full_workers\":", "self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec:", "cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert self.target_step <=", "done: 
bool) -> Tuple[float, float]: raise NotImplementedError def _reset(self) -> NoReturn: self.city_count =", "def get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n:", "-> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count <", "self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces defined below # NB: Subtasks that", ") def __init__(self, early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop", "import numpy as np from scipy.stats import rankdata from typing import Dict, NamedTuple,", "self.n_research_points = n_research_points def completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points for player", "Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel", "= 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights", "int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self,", "if failed_task.any(): rewards = np.where( failed_task, 0., 0.5 + 0.5 * completed_task.astype(float) )", "in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) * 2. - 1. 
self._reset() else:", "game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key,", "-> RewardSpec: \"\"\" Don't override reward_spec or you risk breaking classes like multi_subtask.MultiSubtask", "copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state:", "in player.cities.values()]) for player in game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray: return", "the sum of number of city tiles with unit count as a tie-breaking", ">= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs):", "@abstractmethod def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace):", "= { \"city\": new_city_count - self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points -", "> 1, \"Players start with 1 city tile already\" self.n_city_tiles = n_city_tiles def", "]), \"step\": np.ones(2, dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in", "negative_weight self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points =", "-> Tuple[float, float]: raise NotImplementedError def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count", "\"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val for key, val in kwargs.items() if", "self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "rewards = np.where( 
failed_task, 0., 0.5 + 0.5 * completed_task.astype(float) ) else: rewards", "< 0) | (unit_diff < 0) reward = np.where( lost_unit_or_city, -0.1, reward /", "each step that their cargo remains full # \"full_workers\": -0.01, \"full_workers\": 0., #", "]) def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players]) def", "tuple(reward / 500. / max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward", "n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def", "only_once=True ) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached", "tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1.", "game_state: Game, done: bool) -> Tuple[float, float]: raise NotImplementedError def _reset(self) -> NoReturn:", "GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray:", "get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state:", "self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces defined below #", "== 0).any() or (ct_pct >= 0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min:", "gets its own embedding # See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations", "reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]:", "self.n_city_tiles class 
MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ # Extra", "= np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\": 1., \"unit\": 0.5, \"research\": 0.01,", "dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"],", "defined below class BaseRewardSpace(ABC): \"\"\" A class used for defining a reward space", "zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: return", "zero_sum: bool only_once: bool # All reward spaces defined below class BaseRewardSpace(ABC): \"\"\"", "reward / 1_000. ) return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game,", "new_total_fuel, } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward =", "for unit in player.units]) for player in game_state.players ]) >= self.n class CollectNCoal(Subtask):", "bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec()", "or done if done: self._reset() return tuple(rewards), done def completed_task(self, game_state: Game) ->", "np.array([ sum([unit.cargo.wood for unit in player.units]) for player in game_state.players ]) >= self.n", "compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) 
class", "assert n_city_tiles > 1, \"Players start with 1 city tile already\" self.n_city_tiles =", "so this should keep it strictly so we break by city tiles then", "float]: raise NotImplementedError def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count)", "reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop: bool = False, **kwargs): super(GameResultReward,", "key, w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500. / max(self.positive_weight, self.negative_weight))", "GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game)", "self.weights.update({key: val for key, val in kwargs.items() if key in self.weights.keys()}) for key", "def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in player.units])", "enough\" should be defined separately since each subtask gets its own embedding #", "= False, **kwargs ): assert positive_weight > 0. assert negative_weight > 0. self.positive_weight", "n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self, game_state:", "state for either the full game or a sub-task \"\"\" def __init__(self, **kwargs):", "reward here is defined as the sum of number of city tiles with", "# TODO: Somehow include target locations for subtasks? 
class Subtask(BaseRewardSpace, ABC): @staticmethod def", "np.ndarray: return np.array([ # Extra -1 is included to avoid taking max of", "float], bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards = np.where(", "= count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(),", "in kwargs.items() if key in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in", "Somehow include target locations for subtasks? class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() ->", "0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool only_once: bool # All", "max city tiles and units, # so this should keep it strictly so", "if unit.is_worker()) for player in game_state.players ]), \"step\": np.ones(2, dtype=float) } if done:", "@staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"],", "reward > 0., self.positive_weight * reward, reward ) reward = np.where( reward <", "1, \"Players start with 1 city tile already\" self.n_city_tiles = n_city_tiles def completed_task(self,", "= positive_weight self.negative_weight = negative_weight self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count", "for unit in player.units if unit.is_worker()) for player in game_state.players ]), \"step\": np.ones(2,", "0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val for key, val in kwargs.items()", "-1 is included to avoid taking max of empty sequence max([len(city.citytiles) for city", "1) return ((ct_count == 0).any() or (unit_count == 0).any() or (ct_pct >= 0.75).any()", "500. 
/ max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward = np.where(", "= np.where( reward > 0., self.positive_weight * reward, reward ) reward = np.where(", "RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool only_once: bool # All reward spaces", "-> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod", "[int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) * 2. -", "count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff = new_unit_count -", "def get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "]) >= self.n class CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2,", "n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self,", "float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod", "unit_count = count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct = unit_count /", "game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: pass def get_info(self) -> Dict[str,", "include target locations for subtasks? 
class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec:", "import Player def count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count for player in game_state.players])", "in game_state.players]) def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for player in game_state.players])", "class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ # Extra -1", "count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count -", "assert positive_weight > 0. assert negative_weight > 0. self.positive_weight = positive_weight self.negative_weight =", "get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "32 x 32 => 1024 max city tiles and units, # so this", "np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int = 1,", "=> 1024 max city tiles and units, # so this should keep it", "early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel =", "ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False )", "abc import ABC, abstractmethod import copy import logging import numpy as np from", "game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2, **kwargs):", "def get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec or you risk breaking classes", "= unit_count / max(unit_count.sum(), 1) return ((ct_count == 0).any() or (unit_count == 0).any()", 
"class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False", "for either the full game or a sub-task \"\"\" def __init__(self, **kwargs): if", "n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start", "def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: if not done: return", "0.5 * completed_task.astype(float) ) else: rewards = completed_task.astype(float) done = failed_task.any() or completed_task.any()", "int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start with", "Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards =", "sub-task \"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod", "for subtasks? 
class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec() -> RewardSpec: \"\"\" Don't override", "0., } self.weights.update({key: val for key, val in kwargs.items() if key in self.weights.keys()})", "* w) for key, w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500.", ") reward = np.where( reward < 0., self.negative_weight * reward, reward ) return", "-> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game,", "unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self,", "__init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n", "player in game_state.players ]) >= self.n class CollectNCoal(Subtask): def __init__(self, n: int =", "reward = np.where( lost_unit_or_city, -0.1, reward / 1_000. 
) return tuple(reward), done or", "ct_count = player.city_tile_count unit_count = len(player.units) # max board size is 32 x", "compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop: done =", "int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self,", "game_state.players ]) >= self.n class CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] //", ").sum(axis=0) lost_unit_or_city = (city_diff < 0) | (unit_diff < 0) reward = np.where(", "n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for unit in", "self.unit_count ) self.city_count = new_city_count self.unit_count = new_unit_count return failed def _reset(self) ->", "np from scipy.stats import rankdata from typing import Dict, NamedTuple, NoReturn, Tuple from", "# NB: Subtasks that are \"different enough\" should be defined separately since each", "remains full # \"full_workers\": -0.01, \"full_workers\": 0., # A reward given each step", "super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state)", "def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state)", "-> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop: bool", "completed_task(self, game_state: Game) -> np.ndarray: return np.array([ # Extra -1 is included to", "assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w", "dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p 
in game_state.players] game_result_reward =", "== 0).any() or (unit_count == 0).any() or (ct_pct >= 0.75).any() or (unit_pct >=", ") return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool) ->", "self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count = new_city_count self.unit_count = new_unit_count self.research_points", "defining a reward space and/or done state for either the full game or", "full game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]:", "Game) -> np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state: Game)", "def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self,", "(ct_pct >= 0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float", "only_once=True ) def __init__(self, early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop =", "def __init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] +", "return np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state: Game) -> bool: ct_count =", "def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: raise NotImplementedError def _reset(self)", "0. 
self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop = early_stop self.city_count = np.empty((2,),", "-> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) ->", "completed_task = self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task, 0., 0.5 + 0.5", "(unit_count == 0).any() or (ct_pct >= 0.75).any() or (unit_pct >= 0.75).any()) class RewardSpec(NamedTuple):", "= new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key]", "super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start with 1 city tile already\"", "ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(), 1) return", "\"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize workers each step that their", "= early_stop def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if", "- self.research_points, # Don't penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel,", "float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel =", "self, **kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count)", "n_research_points def completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players])", "* completed_task.astype(float) ) else: rewards = completed_task.astype(float) done = 
failed_task.any() or completed_task.any() or", "count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count - self.city_count, \"unit\": new_unit_count", "bool only_once: bool # All reward spaces defined below class BaseRewardSpace(ABC): \"\"\" A", "get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int", "NoReturn, Tuple from ..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import", "the full game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "return np.array([ sum([city.fuel for city in player.cities.values()]) for player in game_state.players ]) def", "def get_info(self) -> Dict[str, np.ndarray]: return {} # Full game reward spaces defined", "0., self.negative_weight * reward, reward ) return reward def _reset(self) -> NoReturn: self.city_count", "def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self,", "p in game_state.players] rewards = (rankdata(rewards) - 1.) * 2. - 1. 
return", "compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def", "= { \"game_result\": 0., \"city\": 1., \"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, }", "[0]) for player in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self, n_total_fuel:", "failed_task(self, game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed =", "\"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game, done:", "new_city_count - self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points, # Don't", "self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\": 1.,", "**kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start with 1 city tile", "**kwargs ): assert positive_weight > 0. assert negative_weight > 0. 
self.positive_weight = positive_weight", "All reward spaces defined below class BaseRewardSpace(ABC): \"\"\" A class used for defining", "game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task =", "tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count unit_count = len(player.units) # max", "np.where( failed_task, 0., 0.5 + 0.5 * completed_task.astype(float) ) else: rewards = completed_task.astype(float)", "Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state: Game) ->", "mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards = (rankdata(rewards) - 1.)", "dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "= np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 10.,", "self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def", ") def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) /", "+ GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count =", "game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod", "[self.weight_rewards(reward_items_dict[key] * w) for key, w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward /", "- reward.mean()) class 
PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. /", "Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int", "CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n", "unit count return ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec()", "= False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game, done:", "= count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict =", "self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, }", "in game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for player in", "game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or(", "Game, done: bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() ->", "**kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel", "__init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec()", "done: self._reset() return tuple(rewards), done def completed_task(self, game_state: Game) -> np.ndarray: return 
np.array([", "@abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: pass def", "w for key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city = (city_diff < 0)", "1., \"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val for key, val", "self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float)", "import logging import numpy as np from scipy.stats import rankdata from typing import", "See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations for subtasks? class Subtask(BaseRewardSpace, ABC):", "= GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self, game_state:", "self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs)", "__init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles > 1, \"Players", "-> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self,", "__init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points", "@abstractmethod def get_reward_spec() -> RewardSpec: pass @abstractmethod def compute_rewards_and_done(self, game_state: Game, done: bool)", "city tile already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state: 
Game) -> np.ndarray: return", "game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: return", "that their cargo remains full # \"full_workers\": -0.01, \"full_workers\": 0., # A reward", "bool # All reward spaces defined below class BaseRewardSpace(ABC): \"\"\" A class used", "ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return", "CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False )", "Game, done: bool) -> Tuple[Tuple[float, float], bool]: pass def get_info(self) -> Dict[str, np.ndarray]:", "- 1.) * 2. - 1. return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count", "is included to avoid taking max of empty sequence max([len(city.citytiles) for city in", "used for defining a reward space and/or done state for either the full", "(unit_diff < 0) reward = np.where( lost_unit_or_city, -0.1, reward / 1_000. ) return", "from typing import Dict, NamedTuple, NoReturn, Tuple from ..lux.game import Game from ..lux.game_constants", "new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count < self.city_count, new_unit_count", "subtask gets its own embedding # See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target", "game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for defining", "RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def", "in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool)", "reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w in self.weights.items()], axis=0 ).sum(axis=0)", "-> int: return subtask_encoding[type(self)] class CollectNWood(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs):", "-> np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state: Game) ->", "count return ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() ->", "np.where( lost_unit_or_city, -0.1, reward / 1_000. ) return tuple(reward), done or lost_unit_or_city.any() def", "-> Tuple[float, float]: if not done: return 0., 0. # reward here is", "(city_diff < 0) | (unit_diff < 0) reward = np.where( lost_unit_or_city, -0.1, reward", "/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, positive_weight: float", "GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,),", "reward spaces defined below class BaseRewardSpace(ABC): \"\"\" A class used for defining a", "np.array([ sum(unit.get_cargo_space_left() > 0 for unit in player.units if unit.is_worker()) for player in", "def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points =", "-> np.ndarray: return np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state: Game) -> np.ndarray:", "def failed_task(self, game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed", "def __init__(self, early_stop: bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def", "override reward_spec or you risk breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0.,", "self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights * cycle_len assert self.target_step", "kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec: pass", "= negative_weight self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points", "0.005, # Penalize workers each step that their cargo remains full # \"full_workers\":", "= new_city_count self.unit_count = new_unit_count self.research_points = new_research_points 
self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] =", "pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True,", "= GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self, game_state:", "done: bool) -> Tuple[Tuple[float, float], bool]: pass def get_info(self) -> Dict[str, np.ndarray]: return", "in player.cities.values()] + [0]) for player in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask):", "self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points, # Don't penalize losing", "by city tiles then unit count return ct_count * 10000 + unit_count class", "Game, done: bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward", "done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.)", "= new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key]", "city_diff = new_city_count - self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict = {", "GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, positive_weight: float =", "Don't penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([", "\"fuel\": new_total_fuel, } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward", "zero_sum=False, only_once=False ) def __init__( self, **kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count", "in player.units]) for player in game_state.players ]) >= self.n class CollectNUranium(Subtask): def __init__(self,", "float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state)", "super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "player in game_state.players ]) >= self.n class CollectNUranium(Subtask): def __init__(self, n: int =", "__init__( self, positive_weight: float = 1., negative_weight: float = 1., early_stop: bool =", "__init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs): super(CollectNCoal, self).__init__(**kwargs) self.n = n", "= np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ):", "reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w", "for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset()", "game_state.players] rewards = (rankdata(rewards) - 1.) * 2. - 1. 
return tuple(rewards) @staticmethod", "bool = False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game,", "or done @abstractmethod def completed_task(self, game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding:", "= (rankdata(game_result_reward) - 1.) * 2. - 1. self._reset() else: game_result_reward = np.array([0.,", "game_result_reward = np.array([0., 0.]) self.city_count = new_city_count self.unit_count = new_unit_count self.research_points = new_research_points", "class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for defining a reward space for the", "-> np.ndarray: return np.array([ sum([unit.cargo.wood for unit in player.units]) for player in game_state.players", "reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, **kwargs", "np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward", "x 32 => 1024 max city tiles and units, # so this should", "key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def", "self).__init__(**kwargs) assert n_city_tiles > 1, \"Players start with 1 city tile already\" self.n_city_tiles", "super(CollectNUranium, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([", "-0.1, reward / 1_000. 
) return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state:", "sum(unit.get_cargo_space_left() > 0 for unit in player.units if unit.is_worker()) for player in game_state.players", "Game) -> np.ndarray: return np.array([ # Extra -1 is included to avoid taking", "n_total_fuel: int = GAME_CONSTANTS[\"PARAMETERS\"][\"LIGHT_UPKEEP\"][\"CITY\"] * GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"], **kwargs): super(CollectNTotalFuel, self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def", "int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state:", "for the full game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "reward_max=1., zero_sum=False, only_once=True ) def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "-> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def __init__(self, n_nights: int =", "own embedding # See obs_spaces.SUBTASK_ENCODING # TODO: Somehow include target locations for subtasks?", "self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert", "their cargo remains full # \"full_workers\": -0.01, \"full_workers\": 0., # A reward given", "NamedTuple, NoReturn, Tuple from ..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects", "bool) -> Tuple[float, float]: raise NotImplementedError def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count)", "count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = {", "new_research_points - 
self.research_points, # Don't penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel -", "full game or a sub-task \"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received", "lost_unit_or_city, -0.1, reward / 1_000. ) return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self,", "Game, done: bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod def", "RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game, done:", "np.ndarray: return np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state: Game) -> np.ndarray: return", "early_stop: bool = False, **kwargs ): assert positive_weight > 0. assert negative_weight >", "return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game, done: bool)", "Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec:", "class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs) assert n_city_tiles", "for key, w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500. 
/ max(self.positive_weight,", "return np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units)", "failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task, 0.,", "Game, done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any()", "done: bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward -", "= np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec()", "get_reward_spec() -> RewardSpec: \"\"\" Don't override reward_spec or you risk breaking classes like", "or you risk breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec( reward_min=0., reward_max=1., zero_sum=False,", "NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points:", "= (city_diff < 0) | (unit_diff < 0) reward = np.where( lost_unit_or_city, -0.1,", "count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel for city in player.cities.values()]) for player", "-> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state: Game,", "new_unit_count = count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\":", "return self.compute_rewards(game_state, done), done @abstractmethod def compute_rewards(self, game_state: Game, done: bool) -> 
Tuple[float,", "class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1.", "1 city tile already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state: Game) -> np.ndarray:", "as np from scipy.stats import rankdata from typing import Dict, NamedTuple, NoReturn, Tuple", "- self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit in player.units if", "unit_count = len(player.units) # max board size is 32 x 32 => 1024", "np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESEARCH_REQUIREMENTS\"][\"COAL\"], **kwargs ): super(GetNResearchPoints,", "self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w in", "def count_research_points(game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players]) def should_early_stop(game_state:", "GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, **kwargs ): self.city_count", "kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, **kwargs ):", "and units, # so this should keep it strictly so we break by", "-> np.ndarray: return np.array([ # Extra -1 is included to avoid taking max", "from ..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state: Game) -> np.ndarray:", "+ [0]) for player in game_state.players ]) >= self.n_city_tiles class CollectNTotalFuel(Subtask): def __init__(self,", "def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state,", "bool]: pass def get_info(self) -> Dict[str, np.ndarray]: return {} # Full game reward", "defined as the sum of number of city tiles with unit count as", "= count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count", "float]: return tuple(count_city_tiles(game_state) / 1024.) 
class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return", "class CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5, **kwargs): super(CollectNUranium, self).__init__(**kwargs)", "0) | (unit_diff < 0) reward = np.where( lost_unit_or_city, -0.1, reward / 1_000.", "class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool only_once: bool # All reward", "): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state: Game) -> np.ndarray: return", "MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ # Extra -1 is", "float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1.,", "game_state: Game, done: bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state)", "self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count,", "= count_units(game_state) failed = np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count ) self.city_count", "\"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005, # Penalize workers each step", "np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state: Game) -> np.ndarray:", "RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) ->", "from abc import ABC, abstractmethod import copy import logging import numpy as np", "def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) 
def compute_rewards(self,", "return np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray: new_city_count", "defining a reward space for the full game. \"\"\" def compute_rewards_and_done(self, game_state: Game,", "\"step\": 0., } self.weights.update({key: val for key, val in kwargs.items() if key in", "ct_count / max(ct_count.sum(), 1) unit_pct = unit_count / max(unit_count.sum(), 1) return ((ct_count ==", "it strictly so we break by city tiles then unit count return ct_count", "assert negative_weight > 0. self.positive_weight = positive_weight self.negative_weight = negative_weight self.early_stop = early_stop", "-> Tuple[float, float]: reward = np.array(super(ZeroSumStatefulMultiReward, self).compute_rewards(game_state, done)) return tuple(reward - reward.mean()) class", "= np.where( failed_task, 0., 0.5 + 0.5 * completed_task.astype(float) ) else: rewards =", "failed def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask):", ") def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: goal_reached =", "completed_task.any() or done if done: self._reset() return tuple(rewards), done def completed_task(self, game_state: Game)", "unit in player.units if unit.is_worker()) for player in game_state.players ]), \"step\": np.ones(2, dtype=float)", "self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium", "self.target_step = n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int)", "game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) * 2. - 1. 
self._reset() else: game_result_reward", "self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec:", "reward space for the full game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done: bool)", "game_state: Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)]", "self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces", "reward: np.ndarray) -> np.ndarray: reward = np.where( reward > 0., self.positive_weight * reward,", "np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count ) self.city_count = new_city_count self.unit_count =", "with unit count as a tie-breaking mechanism rewards = [int(GameResultReward.compute_player_reward(p)) for p in", "0) reward = np.where( lost_unit_or_city, -0.1, reward / 1_000. ) return tuple(reward), done", "Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in player.units]) for player in", "bool]: if self.early_stop: done = done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def", "done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done: bool)", "self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if", "PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/", "done def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2)", "key, val in kwargs.items() if key in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if", "GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, **kwargs ): self.city_count = np.empty((2,), dtype=float)", "float reward_max: float zero_sum: bool only_once: bool # All reward spaces defined below", "GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done:", "compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done), done", "completed_task(self, game_state: Game) -> np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self,", "reward, reward ) return reward def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count", "Extra -1 is included to avoid taking max of empty sequence max([len(city.citytiles) for", "new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count < self.city_count, new_unit_count < self.unit_count )", "w) for key, w in self.weights.items()], axis=0 ).sum(axis=0) return tuple(reward / 500. 
/", "np.stack( [reward_items_dict[key] * w for key, w in self.weights.items()], axis=0 ).sum(axis=0) lost_unit_or_city =", "game_state.players ]), \"step\": np.ones(2, dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p", "np.array([0., 0.]) self.city_count = new_city_count self.unit_count = new_unit_count self.research_points = new_research_points self.total_fuel =", "return np.array([ sum([unit.cargo.coal for unit in player.units]) for player in game_state.players ]) >=", "bool) -> Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done", "(unit_pct >= 0.75).any()) class RewardSpec(NamedTuple): reward_min: float reward_max: float zero_sum: bool only_once: bool", "= n_nights * cycle_len assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count", "reward_min: float reward_max: float zero_sum: bool only_once: bool # All reward spaces defined", "np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count < self.city_count,", "+ unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1.,", "- self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\":", "tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]:", "done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: raise", "# max board size is 32 x 32 => 1024 max city tiles", ">= self.n class CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 2, **kwargs):", "if kwargs: 
logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def get_reward_spec() -> RewardSpec:", "32 => 1024 max city tiles and units, # so this should keep", "self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int", "self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool)", "tuple(count_city_tiles(game_state) / 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1.", "player.units]) for player in game_state.players ]) >= self.n class CollectNCoal(Subtask): def __init__(self, n:", "np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True,", "= n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class", "the full game or a sub-task \"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace", "} self.weights.update({key: val for key, val in kwargs.items() if key in self.weights.keys()}) for", "\"full_workers\": -0.01, \"full_workers\": 0., # A reward given each step \"step\": 0., }", "= count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct =", "/ max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward = np.where( reward", "in game_state.players ]) >= self.n class CollectNCoal(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"]", "Subtasks that are \"different enough\" should be defined separately since each subtask 
gets", "city tiles then unit count return ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace):", "self.weights = { \"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\": 0.1, \"fuel\": 0.005,", "done def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state)", "a reward space for the full game. \"\"\" def compute_rewards_and_done(self, game_state: Game, done:", "for p in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) * 2. - 1.", "1.) * 2. - 1. return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count =", "new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward =", "return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self, game_state: Game) -> np.ndarray: pass", "np.ndarray: return np.array([ sum([unit.cargo.coal for unit in player.units]) for player in game_state.players ])", "number of city tiles with unit count as a tie-breaking mechanism rewards =", "kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float],", "Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) failed = np.logical_or( new_city_count", "should keep it strictly so we break by city tiles then unit count", "game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for unit in player.units]) for player", "reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float,", "completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >= self.n_total_fuel class SurviveNNights(Subtask): def 
__init__(self,", "= np.where( lost_unit_or_city, -0.1, reward / 1_000. ) return tuple(reward), done or lost_unit_or_city.any()", "super(CollectNCoal, self).__init__(**kwargs) self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([", "already\" self.n_city_tiles = n_city_tiles def completed_task(self, game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >=", "RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop: bool =", "game_state: Game) -> np.ndarray: return count_city_tiles(game_state) >= self.n_city_tiles class MakeNContiguousCityTiles(MakeNCityTiles): def completed_task(self, game_state:", "below # NB: Subtasks that are \"different enough\" should be defined separately since", "scipy.stats import rankdata from typing import Dict, NamedTuple, NoReturn, Tuple from ..lux.game import", "for defining a reward space and/or done state for either the full game", "new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff =", "for city in player.cities.values()] + [0]) for player in game_state.players ]) >= self.n_city_tiles", "reward = np.where( reward < 0., self.negative_weight * reward, reward ) return reward", "def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points)", "return np.array([ sum([unit.cargo.wood for unit in player.units]) for player in game_state.players ]) >=", "empty sequence max([len(city.citytiles) for city in player.cities.values()] + [0]) for player in game_state.players", "= completed_task.astype(float) done = failed_task.any() or completed_task.any() or done if done: self._reset() return", "= 1., negative_weight: float = 1., early_stop: bool = False, **kwargs ): assert", "0.001, } self.weights.update({key: 
val for key, val in kwargs.items() if key in self.weights.keys()})", "1024 max city tiles and units, # so this should keep it strictly", "np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) # Subtask reward spaces defined below # NB: Subtasks", "= np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights", "Game, done: bool) -> Tuple[Tuple[float, float], bool]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state)", "float zero_sum: bool only_once: bool # All reward spaces defined below class BaseRewardSpace(ABC):", "TODO: Somehow include target locations for subtasks? class Subtask(BaseRewardSpace, ABC): @staticmethod def get_reward_spec()", "n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step", "positive_weight > 0. assert negative_weight > 0. 
self.positive_weight = positive_weight self.negative_weight = negative_weight", "0., \"city\": 1., \"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val for", "game_state: Game) -> np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state:", "in player.units]) for player in game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def __init__(self,", "is defined as the sum of number of city tiles with unit count", "for player in game_state.players]) def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel for", "((ct_count == 0).any() or (unit_count == 0).any() or (ct_pct >= 0.75).any() or (unit_pct", "self.city_count, new_unit_count < self.unit_count ) self.city_count = new_city_count self.unit_count = new_unit_count return failed", "ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct", "Tuple[float, float]: raise NotImplementedError def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count =", ") def __init__( self, positive_weight: float = 1., negative_weight: float = 1., early_stop:", "completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit in player.units]) for", "np.array([ sum([unit.cargo.coal for unit in player.units]) for player in game_state.players ]) >= self.n", "False, **kwargs): super(GameResultReward, self).__init__(**kwargs) self.early_stop = early_stop def compute_rewards_and_done(self, game_state: Game, done: bool)", "self, positive_weight: float = 1., negative_weight: float = 1., early_stop: bool = False,", "import rankdata from typing import Dict, NamedTuple, NoReturn, Tuple from ..lux.game import Game", "GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"], **kwargs): super(CollectNWood, self).__init__(**kwargs) self.n = n def completed_task(self, 
game_state: Game) -> np.ndarray:", "here is defined as the sum of number of city tiles with unit", "]) >= self.n class CollectNUranium(Subtask): def __init__(self, n: int = GAME_CONSTANTS[\"PARAMETERS\"][\"RESOURCE_CAPACITY\"][\"WORKER\"] // 5,", "np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 0., \"city\": 1., \"unit\": 0.5,", "zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: reward", "np.array([ sum([city.fuel for city in player.cities.values()]) for player in game_state.players ]) def count_research_points(game_state:", "class used for defining a reward space for the full game. \"\"\" def", "new_city_count self.unit_count = new_unit_count return failed def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count)", "full # \"full_workers\": -0.01, \"full_workers\": 0., # A reward given each step \"step\":", "or a sub-task \"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs:", "0.5 + 0.5 * completed_task.astype(float) ) else: rewards = completed_task.astype(float) done = failed_task.any()", "reward given each step \"step\": 0., } self.weights.update({key: val for key, val in", "self).__init__(**kwargs) self.n_total_fuel = n_total_fuel def completed_task(self, game_state: Game) -> np.ndarray: return count_total_fuel(game_state) >=", "assert self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self,", "\"game_result\": 0., \"city\": 1., \"unit\": 0.5, \"research\": 0.01, \"fuel\": 0.001, } self.weights.update({key: val", "for player in game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int", "def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: 
new_city_count = count_city_tiles(game_state)", "compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: if not done: return 0.,", "1) unit_pct = unit_count / max(unit_count.sum(), 1) return ((ct_count == 0).any() or (unit_count", "Game, done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop: done = done or", "game_state.players]) def count_units(game_state: Game) -> np.ndarray: return np.array([len(player.units) for player in game_state.players]) def", "**kwargs ): super(GetNResearchPoints, self).__init__(**kwargs) self.n_research_points = n_research_points def completed_task(self, game_state: Game) -> np.ndarray:", "Game) -> np.ndarray: return np.array([ sum([city.fuel for city in player.cities.values()]) for player in", "if self.early_stop: done = done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self,", "max board size is 32 x 32 => 1024 max city tiles and", "0., # A reward given each step \"step\": 0., } self.weights.update({key: val for", "]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state)", "np.where( reward > 0., self.positive_weight * reward, reward ) reward = np.where( reward", "-> Dict[str, np.ndarray]: return {} # Full game reward spaces defined below class", "* 2. - 1. return tuple(rewards) @staticmethod def compute_player_reward(player: Player): ct_count = player.city_tile_count", "new_unit_count - self.unit_count, \"research\": new_research_points - self.research_points, # Don't penalize losing fuel at", "import Dict, NamedTuple, NoReturn, Tuple from ..lux.game import Game from ..lux.game_constants import GAME_CONSTANTS", "return tuple(reward / 500. 
/ max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray:", "= self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task, 0., 0.5 + 0.5 *", "new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] *", "float = 1., early_stop: bool = False, **kwargs ): assert positive_weight > 0.", "def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: return self.compute_rewards(game_state, done),", "get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop:", "def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod def", "\"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for", "def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod", "game_state: Game, done: bool) -> Tuple[float, float]: return tuple(count_city_tiles(game_state) / 1024.) 
class StatefulMultiReward(FullGameRewardSpace):", "0.]) self.city_count = new_city_count self.unit_count = new_unit_count self.research_points = new_research_points self.total_fuel = new_total_fuel", "self.failed_task(game_state) completed_task = self.completed_task(game_state) if failed_task.any(): rewards = np.where( failed_task, 0., 0.5 +", "in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool)", "of empty sequence max([len(city.citytiles) for city in player.cities.values()] + [0]) for player in", "we break by city tiles then unit count return ct_count * 10000 +", "np.zeros_like(self.total_fuel) # Subtask reward spaces defined below # NB: Subtasks that are \"different", "self.target_step <= GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"] self.city_count = np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state:", "done: bool) -> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task = self.completed_task(game_state) if", "/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, **kwargs ): self.city_count = np.empty((2,),", ">= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2, **kwargs): super(MakeNCityTiles, self).__init__(**kwargs)", "penalize losing fuel at night \"fuel\": np.maximum(new_total_fuel - self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left()", "for player in game_state.players ]) >= self.n class CollectNCoal(Subtask): def __init__(self, n: int", "A class used for defining a reward space and/or done state for either", "= count_units(game_state) new_research_points = count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) city_diff = new_city_count - self.city_count", "count as a tie-breaking mechanism rewards = 
[int(GameResultReward.compute_player_reward(p)) for p in game_state.players] rewards", "return failed def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class", ") self.city_count = new_city_count self.unit_count = new_unit_count return failed def _reset(self) -> NoReturn:", "tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self, game_state: Game) -> np.ndarray: pass def", "player.units if unit.is_worker()) for player in game_state.players ]), \"step\": np.ones(2, dtype=float) } if", "0).any() or (unit_count == 0).any() or (ct_pct >= 0.75).any() or (unit_pct >= 0.75).any())", "count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state: Game)", "self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1.,", ") def __init__( self, **kwargs ): self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count)", "= new_unit_count - self.unit_count reward_items_dict = { \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points,", "return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. 
/ GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__(", "self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack(", "1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"] + GAME_CONSTANTS[\"PARAMETERS\"][\"NIGHT_LENGTH\"] self.target_step = n_nights *", "tiles then unit count return ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod", "def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ game_state.turn >= self.target_step ]).repeat(2) def", "1. self._reset() else: game_result_reward = np.array([0., 0.]) self.city_count = new_city_count self.unit_count = new_unit_count", "reward_items_dict[\"game_result\"] = game_result_reward assert self.weights.keys() == reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w)", "defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\" A class used for defining a reward space", "1_000. ) return tuple(reward), done or lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool)", "SurviveNNights(Subtask): def __init__(self, n_nights: int = 1, **kwargs): super(SurviveNNights, self).__init__(**kwargs) cycle_len = GAME_CONSTANTS[\"PARAMETERS\"][\"DAY_LENGTH\"]", "self.negative_weight * reward, reward ) return reward def _reset(self) -> NoReturn: self.city_count =", "= player.city_tile_count unit_count = len(player.units) # max board size is 32 x 32", "bool: ct_count = count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1)", "game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) 
*", "@staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=0., reward_max=1., zero_sum=False, only_once=False ) def", "Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for unit in player.units]) for player in", "if key in self.weights.keys()}) for key in copy.copy(kwargs).keys(): if key in self.weights.keys(): del", "0.1, \"fuel\": 0.005, # Penalize workers each step that their cargo remains full", "self.unit_count, \"research\": new_research_points - self.research_points, # Don't penalize losing fuel at night \"fuel\":", "return tuple(count_city_tiles(game_state) / 1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec(", "reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], reward_max=1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"], zero_sum=False, only_once=False ) def __init__( self, positive_weight:", "key in self.weights.keys(): del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done:", "= count_research_points(game_state) new_total_fuel = count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count - self.city_count, \"unit\":", "= count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count - self.city_count, \"unit\": new_unit_count - self.unit_count,", "= count_total_fuel(game_state) city_diff = new_city_count - self.city_count unit_diff = new_unit_count - self.unit_count reward_items_dict", "unit in player.units]) for player in game_state.players ]) >= self.n class CollectNUranium(Subtask): def", "== reward_items_dict.keys() reward = np.stack( [self.weight_rewards(reward_items_dict[key] * w) for key, w in self.weights.items()],", "dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = {", 
"_reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel", "typing import Dict, NamedTuple, NoReturn, Tuple from ..lux.game import Game from ..lux.game_constants import", "sum([city.fuel for city in player.cities.values()]) for player in game_state.players ]) def count_research_points(game_state: Game)", "a sub-task \"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\")", "= n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.uranium for unit", "self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\": 1., \"unit\": 0.5, \"research\":", "): self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count) self.research_points = np.empty_like(self.city_count) self.total_fuel =", "np.ndarray]: return {} # Full game reward spaces defined below class FullGameRewardSpace(BaseRewardSpace): \"\"\"", "this should keep it strictly so we break by city tiles then unit", "\"step\": np.ones(2, dtype=float) } if done: game_result_reward = [int(GameResultReward.compute_player_reward(p)) for p in game_state.players]", "in player.units]) for player in game_state.players ]) >= self.n class CollectNCoal(Subtask): def __init__(self,", "completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.coal for unit in player.units]) for", "not done: return 0., 0. 
# reward here is defined as the sum", "reward ) reward = np.where( reward < 0., self.negative_weight * reward, reward )", "and/or done state for either the full game or a sub-task \"\"\" def", "city in player.cities.values()]) for player in game_state.players ]) def count_research_points(game_state: Game) -> np.ndarray:", "= np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__( self, n_research_points: int =", "done @abstractmethod def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: pass class", "compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: failed_task = self.failed_task(game_state) completed_task", "Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1.,", "reward_max: float zero_sum: bool only_once: bool # All reward spaces defined below class", "Don't override reward_spec or you risk breaking classes like multi_subtask.MultiSubtask \"\"\" return RewardSpec(", "lost_unit_or_city.any() def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float, float]: raise NotImplementedError def", "return tuple(reward - reward.mean()) class PunishingExponentialReward(BaseRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec(", "key in self.weights.keys(): del kwargs[key] super(StatefulMultiReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done:", "return ct_count * 10000 + unit_count class CityTileReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec:", "in game_state.players] rewards = (rankdata(rewards) - 1.) * 2. - 1. 
return tuple(rewards)", "= np.empty((2,), dtype=int) self.unit_count = np.empty_like(self.city_count) def compute_rewards_and_done(self, game_state: Game, done: bool) ->", "Tuple[Tuple[float, float], bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def", "in game_state.players ]) >= self.n class MakeNCityTiles(Subtask): def __init__(self, n_city_tiles: int = 2,", "reward_items_dict.keys() reward = np.stack( [reward_items_dict[key] * w for key, w in self.weights.items()], axis=0", "reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game, done: bool) -> Tuple[float,", "..lux.game_constants import GAME_CONSTANTS from ..lux.game_objects import Player def count_city_tiles(game_state: Game) -> np.ndarray: return", "axis=0 ).sum(axis=0) return tuple(reward / 500. / max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray)", "done: bool) -> Tuple[float, float]: new_city_count = count_city_tiles(game_state) new_unit_count = count_units(game_state) new_research_points =", "# Penalize workers each step that their cargo remains full # \"full_workers\": -0.01,", "= np.zeros_like(self.total_fuel) # Subtask reward spaces defined below # NB: Subtasks that are", "= { \"city\": new_city_count, \"unit\": new_unit_count, \"research\": new_research_points, \"fuel\": new_total_fuel, } if done:", "bool]: goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self, game_state:", "count_total_fuel(game_state) reward_items_dict = { \"city\": new_city_count - self.city_count, \"unit\": new_unit_count - self.unit_count, \"research\":", "new_city_count self.unit_count = new_unit_count self.research_points = new_research_points self.total_fuel = new_total_fuel reward_items_dict[\"game_result\"] = game_result_reward", "A class 
used for defining a reward space for the full game. \"\"\"", "= done or should_early_stop(game_state) return self.compute_rewards(game_state, done), done def compute_rewards(self, game_state: Game, done:", "count_city_tiles(game_state) unit_count = count_units(game_state) ct_pct = ct_count / max(ct_count.sum(), 1) unit_pct = unit_count", "= [int(GameResultReward.compute_player_reward(p)) for p in game_state.players] game_result_reward = (rankdata(game_result_reward) - 1.) * 2.", "self.total_fuel, 0), \"full_workers\": np.array([ sum(unit.get_cargo_space_left() > 0 for unit in player.units if unit.is_worker())", "np.empty_like(self.city_count) self.total_fuel = np.empty_like(self.city_count) self.weights = { \"game_result\": 10., \"city\": 1., \"unit\": 0.5,", "@staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def", "del kwargs[key] super(PunishingExponentialReward, self).__init__(**kwargs) self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float,", "def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: if self.early_stop: done", "self.city_count = new_city_count self.unit_count = new_unit_count return failed def _reset(self) -> NoReturn: self.city_count", "pass def get_info(self) -> Dict[str, np.ndarray]: return {} # Full game reward spaces", "done state for either the full game or a sub-task \"\"\" def __init__(self,", ") return reward def _reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count)", "np.array([ game_state.turn >= self.target_step ]).repeat(2) def failed_task(self, game_state: Game) -> np.ndarray: new_city_count =", "goal_reached = self.completed_task(game_state) return tuple(goal_reached.astype(float)), goal_reached.any() or done @abstractmethod def completed_task(self, game_state: Game)", "for p in 
game_state.players] rewards = (rankdata(rewards) - 1.) * 2. - 1.", "np.ndarray) -> np.ndarray: reward = np.where( reward > 0., self.positive_weight * reward, reward", "sequence max([len(city.citytiles) for city in player.cities.values()] + [0]) for player in game_state.players ])", "bool) -> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return", "-> Tuple[float, float]: pass class GameResultReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec(", "self.n = n def completed_task(self, game_state: Game) -> np.ndarray: return np.array([ sum([unit.cargo.wood for", "get_info(self) -> Dict[str, np.ndarray]: return {} # Full game reward spaces defined below", "1024.) class StatefulMultiReward(FullGameRewardSpace): @staticmethod def get_reward_spec() -> RewardSpec: return RewardSpec( reward_min=-1. / GAME_CONSTANTS[\"PARAMETERS\"][\"MAX_DAYS\"],", "strictly so we break by city tiles then unit count return ct_count *", "self.negative_weight = negative_weight self.early_stop = early_stop self.city_count = np.empty((2,), dtype=float) self.unit_count = np.empty_like(self.city_count)", "_reset(self) -> NoReturn: self.city_count = np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) class GetNResearchPoints(Subtask): def __init__(", "in player.units if unit.is_worker()) for player in game_state.players ]), \"step\": np.ones(2, dtype=float) }", "np.ones_like(self.city_count) self.unit_count = np.ones_like(self.unit_count) self.research_points = np.zeros_like(self.research_points) self.total_fuel = np.zeros_like(self.total_fuel) class ZeroSumStatefulMultiReward(StatefulMultiReward): @staticmethod", "\"\"\" def __init__(self, **kwargs): if kwargs: logging.warning(f\"RewardSpace received unexpected kwargs: {kwargs}\") @staticmethod @abstractmethod", "def count_total_fuel(game_state: Game) -> np.ndarray: return np.array([ sum([city.fuel for 
city in player.cities.values()]) for", "val for key, val in kwargs.items() if key in self.weights.keys()}) for key in", "sum([unit.cargo.uranium for unit in player.units]) for player in game_state.players ]) >= self.n class", "def count_city_tiles(game_state: Game) -> np.ndarray: return np.array([player.city_tile_count for player in game_state.players]) def count_units(game_state:", "return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=True ) def __init__(self, early_stop: bool = False,", "completed_task(self, game_state: Game) -> np.ndarray: return np.array([player.research_points for player in game_state.players]) >= self.n_research_points", "of number of city tiles with unit count as a tie-breaking mechanism rewards", "self._reset() def compute_rewards_and_done(self, game_state: Game, done: bool) -> Tuple[Tuple[float, float], bool]: new_city_count =", "Game) -> np.ndarray: pass def get_subtask_encoding(self, subtask_encoding: dict) -> int: return subtask_encoding[type(self)] class", "-> RewardSpec: return RewardSpec( reward_min=-1., reward_max=1., zero_sum=True, only_once=False ) def compute_rewards(self, game_state: Game,", "max(self.positive_weight, self.negative_weight)) def weight_rewards(self, reward: np.ndarray) -> np.ndarray: reward = np.where( reward >" ]
[ "def Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0] = -1/R - (Isa / Vt)", "teh current voltage values eff = F(Vnew) # calculate the value of the", "val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of dV x_val", "if __name__ == \"__main__\": error = 10e-15 # the maximum allowable error val", "= -1/R - (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[0][1]", "= newton_raphson(error) # plot error in the log scale dV_norm_err = val[1] iter_no", "solves the equation F = 0 def F(v): # v is a 2", "Vt) J[1][0] = (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][1]", "500 # in Ohms Vt = 0.025 # in volts Isa = 0.6e-6", "\"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"Helvetica\" }) # constants", "- Isa * (np.exp((v[0] - v[1]) / Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1])", "/ Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the", "Vt) J[1][1] = -(Isb / Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return", "which contains the voltage values of the circuit f1 = (E - v[0])", "# calculate the value of the F vector for the current voltage values", "Vt) * np.exp((v[0] - v[1]) / Vt) J[1][1] = -(Isb / Vt) *", "Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt)) -", "the above two functions to calculate the voltage solution to the circuit def", "# in Ohms Vt = 0.025 # in volts Isa = 0.6e-6 #", "np.zeros(shape=(2, 1)) dV_vec = [] val_vec = [] conv = False while not", "error = 10e-15 # the maximum allowable error val = newton_raphson(error) # plot", "of dV for convergence criteria Vnew = np.add(Vnew, dV) # compute new voltage", "print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of dV x_val = np.linspace(1, iter_no, iter_no)", "x 1 vector which contains the voltage values of the circuit f1 =", "error in the log scale dV_norm_err = val[1] iter_no = val[2] ans =", "F(v): # v is a 2 x 
1 vector which contains the voltage", "= np.zeros(shape=(2, 2)) J[0][0] = -1/R - (Isa / Vt) * np.exp((v[0] -", "= Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1) F = np.array([f1,", "return Vnew, dV_vec, i, val_vec if __name__ == \"__main__\": error = 10e-15 #", "R = 500 # in Ohms Vt = 0.025 # in volts Isa", "the voltage solution to the circuit def newton_raphson(maxerr): i = 0 Vnew =", "= 10e-15 # the maximum allowable error val = newton_raphson(error) # plot error", "Amps Isb = 1.2e-6 # in Amps # Calculates the vector F which", "i = 0 Vnew = np.zeros(shape=(2, 1)) dV_vec = [] val_vec = []", "not conv: i += 1 F_p = Jacobian(Vnew) # calculate the Jacobian given", "plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other serif", "Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0] = -1/R - (Isa / Vt) *", "np.add(Vnew, dV) # compute new voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\")", "maxerr: break return Vnew, dV_vec, i, val_vec if __name__ == \"__main__\": error =", "the 2-norm of dV for convergence criteria Vnew = np.add(Vnew, dV) # compute", "0.025 # in volts Isa = 0.6e-6 # in Amps Isb = 1.2e-6", "dV for convergence criteria Vnew = np.add(Vnew, dV) # compute new voltage value", "Vnew = np.zeros(shape=(2, 1)) dV_vec = [] val_vec = [] conv = False", "v[1]) / Vt) J[0][1] = (Isa / Vt) * np.exp((v[0] - v[1]) /", "1)) dV_vec = [] val_vec = [] conv = False while not conv:", "# To use LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\":", "the circuit f1 = (E - v[0]) / R - Isa * (np.exp((v[0]", "- v[1]) / Vt) J[1][1] = -(Isb / Vt) * np.exp(v[1] / Vt)", "f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1) F =", "0.6e-6 # in Amps Isb = 1.2e-6 # in Amps # Calculates the", "Jacobian def Jacobian(v): J = np.zeros(shape=(2, 
2)) J[0][0] = -1/R - (Isa /", "v[1]) / Vt) J[1][1] = -(Isb / Vt) * np.exp(v[1] / Vt) -", "val_vec if __name__ == \"__main__\": error = 10e-15 # the maximum allowable error", "= F(Vnew) # calculate the value of the F vector for the current", "compute the Jacobian def Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0] = -1/R -", "/ Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above two functions to", "J[0][0] = -1/R - (Isa / Vt) * np.exp((v[0] - v[1]) / Vt)", "maximum allowable error val = newton_raphson(error) # plot error in the log scale", "Vt) * np.exp((v[0] - v[1]) / Vt) J[1][0] = (Isa / Vt) *", "Vt) J[0][1] = (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][0]", "current voltage values eff = F(Vnew) # calculate the value of the F", "of the circuit f1 = (E - v[0]) / R - Isa *", "= (E - v[0]) / R - Isa * (np.exp((v[0] - v[1]) /", "# compute the Jacobian def Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0] = -1/R", "circuit def newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2, 1)) dV_vec = []", "* np.exp((v[0] - v[1]) / Vt) J[1][0] = (Isa / Vt) * np.exp((v[0]", "the voltage values of the circuit f1 = (E - v[0]) / R", "(Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][1] = -(Isb /", "np.exp((v[0] - v[1]) / Vt) J[1][0] = (Isa / Vt) * np.exp((v[0] -", "crit_val < maxerr: break return Vnew, dV_vec, i, val_vec if __name__ == \"__main__\":", "in the log scale dV_norm_err = val[1] iter_no = val[2] ans = val[3]", "print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val <", "\"+str(dV)) if crit_val < maxerr: break return Vnew, dV_vec, i, val_vec if __name__", "# in Amps # Calculates the vector F which solves the equation F", "np.exp((v[0] - v[1]) / Vt) J[1][1] = -(Isb / Vt) * np.exp(v[1] /", "\"__main__\": error = 10e-15 # the maximum allowable error val = newton_raphson(error) #", "\"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, 
\"font.family\": \"Helvetica\" })", "/ Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1) F = np.array([f1, f2]) return", "calculate the value of the F vector for the current voltage values dV", "allowable error val = newton_raphson(error) # plot error in the log scale dV_norm_err", "eff = F(Vnew) # calculate the value of the F vector for the", "eff), -1) # compute dV crit_val = np.linalg.norm(dV, 2) # compute the 2-norm", "Vt = 0.025 # in volts Isa = 0.6e-6 # in Amps Isb", "= 500 # in Ohms Vt = 0.025 # in volts Isa =", "uses the above two functions to calculate the voltage solution to the circuit", "current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val =", "/ Vt) J[0][1] = (Isa / Vt) * np.exp((v[0] - v[1]) / Vt)", "1) F = np.array([f1, f2]) return F # compute the Jacobian def Jacobian(v):", "* np.exp((v[0] - v[1]) / Vt) J[1][1] = -(Isb / Vt) * np.exp(v[1]", "\"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"Helvetica\" }) # constants used in", "Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above two functions to calculate", "newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2, 1)) dV_vec = [] val_vec =", "the 10log_10 of dV x_val = np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val,", "= val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of dV", "return J # uses the above two functions to calculate the voltage solution", "0 def F(v): # v is a 2 x 1 vector which contains", "print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V =", "[] val_vec = [] conv = False while not conv: i += 1", "10log_10 of dV x_val = np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV)", "the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", 
\"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and", "compute dV crit_val = np.linalg.norm(dV, 2) # compute the 2-norm of dV for", "[\"Helvetica\"]}) # for Palatino and other serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\":", "= np.add(Vnew, dV) # compute new voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew)", "np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above two functions", "/ Vt) * np.exp((v[0] - v[1]) / Vt) J[1][1] = -(Isb / Vt)", "plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other", "Vt) * np.exp((v[0] - v[1]) / Vt) J[0][1] = (Isa / Vt) *", "- v[1]) / Vt) J[0][1] = (Isa / Vt) * np.exp((v[0] - v[1])", "which solves the equation F = 0 def F(v): # v is a", "= np.array([f1, f2]) return F # compute the Jacobian def Jacobian(v): J =", "voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian =", "f2]) return F # compute the Jacobian def Jacobian(v): J = np.zeros(shape=(2, 2))", "= 0 def F(v): # v is a 2 x 1 vector which", "ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of dV x_val =", "/ Vt) * np.exp((v[0] - v[1]) / Vt) J[0][1] = (Isa / Vt)", "scale dV_norm_err = val[1] iter_no = val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) #", "= [] conv = False while not conv: i += 1 F_p =", "next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector =", "in the problem E = 0.220 # in volts R = 500 #", "(np.exp((v[1] / Vt)) - 1) F = np.array([f1, f2]) return F # compute", "serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": 
[\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\":", "- (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above two functions to calculate the", "np.linalg.norm(dV, 2) # compute the 2-norm of dV for convergence criteria Vnew =", "\"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val < maxerr: break return Vnew, dV_vec,", "in Amps Isb = 1.2e-6 # in Amps # Calculates the vector F", "v[1]) / Vt) J[1][0] = (Isa / Vt) * np.exp((v[0] - v[1]) /", "0.220 # in volts R = 500 # in Ohms Vt = 0.025", "= np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val = np.linalg.norm(dV, 2) # compute", "= 1.2e-6 # in Amps # Calculates the vector F which solves the", "v[1]) / Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] /", "np.array([f1, f2]) return F # compute the Jacobian def Jacobian(v): J = np.zeros(shape=(2,", "# in Amps Isb = 1.2e-6 # in Amps # Calculates the vector", "vector which contains the voltage values of the circuit f1 = (E -", "the maximum allowable error val = newton_raphson(error) # plot error in the log", "\"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other serif fonts use: plt.rcParams.update({ \"text.usetex\": True,", "/ Vt)) - 1) F = np.array([f1, f2]) return F # compute the", "Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above", "# v is a 2 x 1 vector which contains the voltage values", "Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1) F = np.array([f1, f2]) return F", "in volts R = 500 # in Ohms Vt = 0.025 # in", "= \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val < maxerr:", "# plot error in the log scale dV_norm_err = val[1] iter_no = val[2]", "circuit f1 = (E - v[0]) / R - Isa * (np.exp((v[0] -", "Ohms Vt = 0.025 # in volts Isa = 0.6e-6 # in Amps", "a 2 x 1 vector which contains the voltage values of the circuit", "the equation F = 0 def F(v): # v is a 2 x", "J[0][1] = (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][0] =", "conv = False while 
not conv: i += 1 F_p = Jacobian(Vnew) #", "numpy as np import matplotlib.pyplot as plt # To use LaTeX in the", "True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other serif fonts use:", "\"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other serif fonts use: plt.rcParams.update({", "voltage solution to the circuit def newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2,", "(np.exp((v[0] - v[1]) / Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb *", "# Plot the 10log_10 of dV x_val = np.linspace(1, iter_no, iter_no) dV =", "used in the problem E = 0.220 # in volts R = 500", "# compute new voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration =", "# for Palatino and other serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\",", "for Palatino and other serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\":", "val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V", "f1 = (E - v[0]) / R - Isa * (np.exp((v[0] - v[1])", "- (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[0][1] = (Isa", "def F(v): # v is a 2 x 1 vector which contains the", "newton_raphson(error) # plot error in the log scale dV_norm_err = val[1] iter_no =", "print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val < maxerr: break return", "= np.linalg.norm(dV, 2) # compute the 2-norm of dV for convergence criteria Vnew", "* np.exp((v[0] - v[1]) / Vt) J[0][1] = (Isa / Vt) * np.exp((v[0]", "/ Vt) * np.exp((v[0] - v[1]) / Vt) J[1][0] = (Isa / Vt)", "iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV) plt.xlabel(\"Number of Iterations\") plt.ylabel(\"log(2-norm dV)\") plt.show()", 
"np.zeros(shape=(2, 2)) J[0][0] = -1/R - (Isa / Vt) * np.exp((v[0] - v[1])", "/ Vt) J[1][0] = (Isa / Vt) * np.exp((v[0] - v[1]) / Vt)", "= val[1] iter_no = val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot the", "F # compute the Jacobian def Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0] =", "- v[1]) / Vt) J[1][0] = (Isa / Vt) * np.exp((v[0] - v[1])", "# in volts Isa = 0.6e-6 # in Amps Isb = 1.2e-6 #", "F(Vnew) # calculate the value of the F vector for the current voltage", "i += 1 F_p = Jacobian(Vnew) # calculate the Jacobian given teh current", "compute new voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i))", "plot error in the log scale dV_norm_err = val[1] iter_no = val[2] ans", "value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p))", "for convergence criteria Vnew = np.add(Vnew, dV) # compute new voltage value for", "the log scale dV_norm_err = val[1] iter_no = val[2] ans = val[3] print(\"------------------------------------\")", "dV_vec = [] val_vec = [] conv = False while not conv: i", "constants used in the problem E = 0.220 # in volts R =", "val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of dV x_val = np.linspace(1, iter_no,", "val = newton_raphson(error) # plot error in the log scale dV_norm_err = val[1]", "= (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][0] = (Isa", "fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True,", "Amps # Calculates the vector F which solves the equation F = 0", "= 0.220 # in volts R = 500 # in Ohms Vt =", "iter_no = val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of", 
"if crit_val < maxerr: break return Vnew, dV_vec, i, val_vec if __name__ ==", "the Jacobian def Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0] = -1/R - (Isa", "equation F = 0 def F(v): # v is a 2 x 1", "def newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2, 1)) dV_vec = [] val_vec", "Isa = 0.6e-6 # in Amps Isb = 1.2e-6 # in Amps #", "volts R = 500 # in Ohms Vt = 0.025 # in volts", "the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val", "# constants used in the problem E = 0.220 # in volts R", "= \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if", "dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394", "np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val = np.linalg.norm(dV, 2) # compute the", "R - Isa * (np.exp((v[0] - v[1]) / Vt) - 1) f2 =", "error val = newton_raphson(error) # plot error in the log scale dV_norm_err =", "plt # To use LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\",", "criteria Vnew = np.add(Vnew, dV) # compute new voltage value for next step", "0 Vnew = np.zeros(shape=(2, 1)) dV_vec = [] val_vec = [] conv =", "dV crit_val = np.linalg.norm(dV, 2) # compute the 2-norm of dV for convergence", "True, \"font.family\": \"Helvetica\" }) # constants used in the problem E = 0.220", "values of the circuit f1 = (E - v[0]) / R - Isa", "= 0.6e-6 # in Amps Isb = 1.2e-6 # in Amps # Calculates", "log scale dV_norm_err = val[1] iter_no = val[2] ans = val[3] print(\"------------------------------------\") print(ans[7])", "solution to the circuit def newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2, 1))", "crit_val = np.linalg.norm(dV, 2) # compute the 2-norm of dV for convergence criteria", "* (np.exp((v[1] / Vt)) - 1) F = 
np.array([f1, f2]) return F #", "* np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above two", "= \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val < maxerr: break return Vnew,", "as np import matplotlib.pyplot as plt # To use LaTeX in the plots", "return F # compute the Jacobian def Jacobian(v): J = np.zeros(shape=(2, 2)) J[0][0]", "contains the voltage values of the circuit f1 = (E - v[0]) /", "__name__ == \"__main__\": error = 10e-15 # the maximum allowable error val =", "in Amps # Calculates the vector F which solves the equation F =", "= 0.025 # in volts Isa = 0.6e-6 # in Amps Isb =", "/ Vt) J[1][1] = -(Isb / Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt)", "2-norm of dV for convergence criteria Vnew = np.add(Vnew, dV) # compute new", "above two functions to calculate the voltage solution to the circuit def newton_raphson(maxerr):", "v is a 2 x 1 vector which contains the voltage values of", "volts Isa = 0.6e-6 # in Amps Isb = 1.2e-6 # in Amps", "+= 1 F_p = Jacobian(Vnew) # calculate the Jacobian given teh current voltage", "Vnew = np.add(Vnew, dV) # compute new voltage value for next step dV_vec.append(crit_val)", "1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1) F", "= val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10 of dV x_val = np.linspace(1,", "= 0 Vnew = np.zeros(shape=(2, 1)) dV_vec = [] val_vec = [] conv", "x_val = np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV) plt.xlabel(\"Number of Iterations\")", "val_vec = [] conv = False while not conv: i += 1 F_p", "F vector for the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) #", "v[0]) / R - Isa * (np.exp((v[0] - v[1]) / Vt) - 1)", "values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val = np.linalg.norm(dV, 2)", "[\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"Helvetica\" }) # 
constants used in the", "F which solves the equation F = 0 def F(v): # v is", "i, val_vec if __name__ == \"__main__\": error = 10e-15 # the maximum allowable", "== \"__main__\": error = 10e-15 # the maximum allowable error val = newton_raphson(error)", "vector F which solves the equation F = 0 def F(v): # v", "-1/R - (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[0][1] =", "vector for the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute", "matplotlib.pyplot as plt # To use LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True,", "in Ohms Vt = 0.025 # in volts Isa = 0.6e-6 # in", "print(\"\\u0394 V = \"+str(dV)) if crit_val < maxerr: break return Vnew, dV_vec, i,", "values eff = F(Vnew) # calculate the value of the F vector for", "J # uses the above two functions to calculate the voltage solution to", "= (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][1] = -(Isb", "(Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses the above two functions to calculate the voltage", "voltage values eff = F(Vnew) # calculate the value of the F vector", "break return Vnew, dV_vec, i, val_vec if __name__ == \"__main__\": error = 10e-15", "print(ans[7]) # Plot the 10log_10 of dV x_val = np.linspace(1, iter_no, iter_no) dV", "conv: i += 1 F_p = Jacobian(Vnew) # calculate the Jacobian given teh", "convergence criteria Vnew = np.add(Vnew, dV) # compute new voltage value for next", "new voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian", "[] conv = False while not conv: i += 1 F_p = Jacobian(Vnew)", "# compute the 2-norm of dV for convergence criteria Vnew = np.add(Vnew, dV)", "use LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) #", "}) # constants used in the problem E = 0.220 # in volts", "the value of the F vector for the current voltage values dV 
=", "and other serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], })", "calculate the voltage solution to the circuit def newton_raphson(maxerr): i = 0 Vnew", "- 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1)", "# in volts R = 500 # in Ohms Vt = 0.025 #", "Vt)) - 1) F = np.array([f1, f2]) return F # compute the Jacobian", "\"text.usetex\": True, \"font.family\": \"Helvetica\" }) # constants used in the problem E =", "Jacobian given teh current voltage values eff = F(Vnew) # calculate the value", "\"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val < maxerr: break", "use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\":", "V = \"+str(dV)) if crit_val < maxerr: break return Vnew, dV_vec, i, val_vec", "1 F_p = Jacobian(Vnew) # calculate the Jacobian given teh current voltage values", "dV_norm_err = val[1] iter_no = val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot", "to calculate the voltage solution to the circuit def newton_raphson(maxerr): i = 0", "\"Helvetica\" }) # constants used in the problem E = 0.220 # in", "the problem E = 0.220 # in volts R = 500 # in", "= \"+str(dV)) if crit_val < maxerr: break return Vnew, dV_vec, i, val_vec if", "< maxerr: break return Vnew, dV_vec, i, val_vec if __name__ == \"__main__\": error", "\"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other serif fonts use: plt.rcParams.update({ \"text.usetex\":", "dV) # compute new voltage value for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration", "the circuit def newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2, 1)) dV_vec =", "plt.rcParams.update({ \"text.usetex\": True, \"font.family\": 
\"Helvetica\" }) # constants used in the problem E", "# compute dV crit_val = np.linalg.norm(dV, 2) # compute the 2-norm of dV", "val[1] iter_no = val[2] ans = val[3] print(\"------------------------------------\") print(ans[7]) # Plot the 10log_10", "as plt # To use LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\":", "step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff))", "the F vector for the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1)", "-(Isb / Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J # uses", "the vector F which solves the equation F = 0 def F(v): #", "functions to calculate the voltage solution to the circuit def newton_raphson(maxerr): i =", "F = np.array([f1, f2]) return F # compute the Jacobian def Jacobian(v): J", "voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val = np.linalg.norm(dV,", "plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"Helvetica\"", "F_p = Jacobian(Vnew) # calculate the Jacobian given teh current voltage values eff", "Vnew, dV_vec, i, val_vec if __name__ == \"__main__\": error = 10e-15 # the", "- 1) F = np.array([f1, f2]) return F # compute the Jacobian def", "for next step dV_vec.append(crit_val) val_vec.append(Vnew) print(\"------------------------------------\") print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector", "value of the F vector for the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p),", "Isa * (np.exp((v[0] - v[1]) / Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) /", "is a 2 x 1 vector which contains the voltage values of the", "F = 0 def F(v): # v is a 2 x 1 vector", "in the plots plt.rcParams.update({ 
\"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino", "J[1][0] = (Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][1] =", "J[1][1] = -(Isb / Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J", "2) # compute the 2-norm of dV for convergence criteria Vnew = np.add(Vnew,", "Calculates the vector F which solves the equation F = 0 def F(v):", "of dV x_val = np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV) plt.xlabel(\"Number", "/ R - Isa * (np.exp((v[0] - v[1]) / Vt) - 1) f2", "(Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[1][0] = (Isa /", "dV_vec, i, val_vec if __name__ == \"__main__\": error = 10e-15 # the maximum", "LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for", "\"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = \"+str(dV)) if crit_val", "np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV) plt.xlabel(\"Number of Iterations\") plt.ylabel(\"log(2-norm dV)\")", "import matplotlib.pyplot as plt # To use LaTeX in the plots plt.rcParams.update({ \"text.usetex\":", "problem E = 0.220 # in volts R = 500 # in Ohms", "True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"Helvetica\" }) #", "1.2e-6 # in Amps # Calculates the vector F which solves the equation", "= False while not conv: i += 1 F_p = Jacobian(Vnew) # calculate", "two functions to calculate the voltage solution to the circuit def newton_raphson(maxerr): i", "Isb = 1.2e-6 # in Amps # Calculates the vector F which solves", "given teh current voltage values eff = F(Vnew) # calculate the value of", "for the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV", "False while not conv: i += 1 F_p = Jacobian(Vnew) # calculate the", 
"np.exp((v[0] - v[1]) / Vt) J[0][1] = (Isa / Vt) * np.exp((v[0] -", "= Jacobian(Vnew) # calculate the Jacobian given teh current voltage values eff =", "\"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]}) # for Palatino and other serif fonts", "(Isa / Vt) * np.exp((v[0] - v[1]) / Vt) J[0][1] = (Isa /", "/ Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt))", "# the maximum allowable error val = newton_raphson(error) # plot error in the", "# uses the above two functions to calculate the voltage solution to the", "\"font.family\": \"Helvetica\" }) # constants used in the problem E = 0.220 #", "import numpy as np import matplotlib.pyplot as plt # To use LaTeX in", "2 x 1 vector which contains the voltage values of the circuit f1", "1 vector which contains the voltage values of the circuit f1 = (E", "Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1] / Vt)) - 1) F = np.array([f1, f2])", "E = 0.220 # in volts R = 500 # in Ohms Vt", "To use LaTeX in the plots plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"sans-serif\", \"font.sans-serif\": [\"Helvetica\"]})", "= -(Isb / Vt) * np.exp(v[1] / Vt) - (Isa/Vt)*np.exp((v[0]-v[1])/Vt) return J #", "10e-15 # the maximum allowable error val = newton_raphson(error) # plot error in", "calculate the Jacobian given teh current voltage values eff = F(Vnew) # calculate", "- v[0]) / R - Isa * (np.exp((v[0] - v[1]) / Vt) -", "}) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"Helvetica\" }) # constants used in the problem", "while not conv: i += 1 F_p = Jacobian(Vnew) # calculate the Jacobian", "-1) # compute dV crit_val = np.linalg.norm(dV, 2) # compute the 2-norm of", "= np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV) plt.xlabel(\"Number of Iterations\") plt.ylabel(\"log(2-norm", "print(\"iteration = \"+str(i)) print(\"Jacobian = \"+str(F_p)) print(\"F-vector = \"+str(eff)) print(\"\\u0394 V = 
\"+str(dV))", "the Jacobian given teh current voltage values eff = F(Vnew) # calculate the", "voltage values of the circuit f1 = (E - v[0]) / R -", "dV x_val = np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err) plt.plot(x_val, dV) plt.xlabel(\"Number of", "* (np.exp((v[0] - v[1]) / Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb", "in volts Isa = 0.6e-6 # in Amps Isb = 1.2e-6 # in", "to the circuit def newton_raphson(maxerr): i = 0 Vnew = np.zeros(shape=(2, 1)) dV_vec", "Plot the 10log_10 of dV x_val = np.linspace(1, iter_no, iter_no) dV = 10*np.log10(dV_norm_err)", "# calculate the Jacobian given teh current voltage values eff = F(Vnew) #", "compute the 2-norm of dV for convergence criteria Vnew = np.add(Vnew, dV) #", "- v[1]) / Vt) - 1) f2 = Isa*(np.exp((v[0]-v[1]) / Vt)-1)-Isb * (np.exp((v[1]", "= np.zeros(shape=(2, 1)) dV_vec = [] val_vec = [] conv = False while", "Jacobian(Vnew) # calculate the Jacobian given teh current voltage values eff = F(Vnew)", "np import matplotlib.pyplot as plt # To use LaTeX in the plots plt.rcParams.update({", "J = np.zeros(shape=(2, 2)) J[0][0] = -1/R - (Isa / Vt) * np.exp((v[0]", "= [] val_vec = [] conv = False while not conv: i +=", "# Calculates the vector F which solves the equation F = 0 def", "(E - v[0]) / R - Isa * (np.exp((v[0] - v[1]) / Vt)", "2)) J[0][0] = -1/R - (Isa / Vt) * np.exp((v[0] - v[1]) /", "dV = np.multiply(np.dot(np.linalg.inv(F_p), eff), -1) # compute dV crit_val = np.linalg.norm(dV, 2) #", "of the F vector for the current voltage values dV = np.multiply(np.dot(np.linalg.inv(F_p), eff),", "Palatino and other serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"],", "other serif fonts use: plt.rcParams.update({ \"text.usetex\": True, \"font.family\": \"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({", "\"serif\", \"font.serif\": [\"Palatino\"], }) plt.rcParams.update({ \"text.usetex\": True, \"font.family\": 
\"Helvetica\" }) # constants used" ]
[ "License. * You may obtain a copy of the License at http://opensource.org/licenses/MIT *", "# -*- coding: utf-8 -*- \"\"\" * TencentBlueKing is pleased to support the", "specific language governing permissions and limitations under the License. \"\"\" import tempfile as", "the License. \"\"\" import tempfile as _tempfile_ from pathlib import Path from typing", "License at http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to in", "[] def core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with", "Generator[Callable[..., Path], None, None]: files = [] def core(content: Optional[str] = None): filepath", "this file except in compliance with the License. * You may obtain a", "CONDITIONS OF ANY KIND, either express or implied. See the License for the", "pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path], None, None]: files = [] def", "implied. See the License for the * specific language governing permissions and limitations", "None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\") as fh: fh.write(content)", "open(filepath, mode=\"w\") as fh: fh.write(content) return filepath yield core for item in files:", "available. * Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved. *", "License (the \"License\"); you may not use this file except in compliance with", "OR CONDITIONS OF ANY KIND, either express or implied. See the License for", "from pathlib import Path from typing import Callable, Generator, Optional import pytest @pytest.fixture", "content: with open(filepath, mode=\"w\") as fh: fh.write(content) return filepath yield core for item", "under the License is distributed on * an \"AS IS\" BASIS, WITHOUT WARRANTIES", "you may not use this file except in compliance with the License. 
*", "to in writing, software distributed under the License is distributed on * an", "core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\")", "may obtain a copy of the License at http://opensource.org/licenses/MIT * Unless required by", "KIND, either express or implied. See the License for the * specific language", "if content: with open(filepath, mode=\"w\") as fh: fh.write(content) return filepath yield core for", "* an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "mktemp() -> Generator[Callable[..., Path], None, None]: files = [] def core(content: Optional[str] =", "import Callable, Generator, Optional import pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path], None,", "for the * specific language governing permissions and limitations under the License. \"\"\"", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the", "TH<NAME>, a Tencent company. All rights reserved. * Licensed under the MIT License", "Licensed under the MIT License (the \"License\"); you may not use this file", "utf-8 -*- \"\"\" * TencentBlueKing is pleased to support the open source community", "* specific language governing permissions and limitations under the License. \"\"\" import tempfile", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "平台(BlueKing-PaaS) available. * Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved.", "PaaS 平台(BlueKing-PaaS) available. * Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights", "pathlib import Path from typing import Callable, Generator, Optional import pytest @pytest.fixture def", "as _tempfile_ from pathlib import Path from typing import Callable, Generator, Optional import", "ANY KIND, either express or implied. See the License for the * specific", "language governing permissions and limitations under the License. 
\"\"\" import tempfile as _tempfile_", "copy of the License at http://opensource.org/licenses/MIT * Unless required by applicable law or", "* Unless required by applicable law or agreed to in writing, software distributed", "Callable, Generator, Optional import pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path], None, None]:", "to support the open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. *", "the open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. * Copyright (C)", "None, None]: files = [] def core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp())", "reserved. * Licensed under the MIT License (the \"License\"); you may not use", "under the License. \"\"\" import tempfile as _tempfile_ from pathlib import Path from", "Path from typing import Callable, Generator, Optional import pytest @pytest.fixture def mktemp() ->", "* Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved. * Licensed", "the License at http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to", "Unless required by applicable law or agreed to in writing, software distributed under", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "writing, software distributed under the License is distributed on * an \"AS IS\"", "import tempfile as _tempfile_ from pathlib import Path from typing import Callable, Generator,", "filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\") as fh: fh.write(content) return", "with open(filepath, mode=\"w\") as fh: fh.write(content) return filepath yield core for item in", "-> Generator[Callable[..., Path], None, None]: files = [] def core(content: Optional[str] = None):", "in writing, software distributed under the License is distributed on * an \"AS", "source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. 
* Copyright (C) 2017-2021 TH<NAME>,", "-*- coding: utf-8 -*- \"\"\" * TencentBlueKing is pleased to support the open", "support the open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. * Copyright", "permissions and limitations under the License. \"\"\" import tempfile as _tempfile_ from pathlib", "typing import Callable, Generator, Optional import pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path],", "Generator, Optional import pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path], None, None]: files", "= [] def core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content:", "as fh: fh.write(content) return filepath yield core for item in files: if item.exists:", "http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to in writing, software", "2017-2021 TH<NAME>, a Tencent company. All rights reserved. * Licensed under the MIT", "governing permissions and limitations under the License. \"\"\" import tempfile as _tempfile_ from", "All rights reserved. * Licensed under the MIT License (the \"License\"); you may", "obtain a copy of the License at http://opensource.org/licenses/MIT * Unless required by applicable", "(the \"License\"); you may not use this file except in compliance with the", "open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. * Copyright (C) 2017-2021", "Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\") as fh: fh.write(content) return filepath yield", "a Tencent company. All rights reserved. * Licensed under the MIT License (the", "company. All rights reserved. * Licensed under the MIT License (the \"License\"); you", "(C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved. 
* Licensed under the", "\"License\"); you may not use this file except in compliance with the License.", "def core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath,", "pleased to support the open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available.", "Path], None, None]: files = [] def core(content: Optional[str] = None): filepath =", "may not use this file except in compliance with the License. * You", "use this file except in compliance with the License. * You may obtain", "None]: files = [] def core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath)", "@pytest.fixture def mktemp() -> Generator[Callable[..., Path], None, None]: files = [] def core(content:", "rights reserved. * Licensed under the MIT License (the \"License\"); you may not", "of the License at http://opensource.org/licenses/MIT * Unless required by applicable law or agreed", "not use this file except in compliance with the License. * You may", "MIT License (the \"License\"); you may not use this file except in compliance", "\"\"\" import tempfile as _tempfile_ from pathlib import Path from typing import Callable,", "distributed on * an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "<gh_stars>10-100 # -*- coding: utf-8 -*- \"\"\" * TencentBlueKing is pleased to support", "= None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\") as fh:", "except in compliance with the License. 
* You may obtain a copy of", "the License is distributed on * an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\") as", "agreed to in writing, software distributed under the License is distributed on *", "is pleased to support the open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS)", "by applicable law or agreed to in writing, software distributed under the License", "applicable law or agreed to in writing, software distributed under the License is", "import Path from typing import Callable, Generator, Optional import pytest @pytest.fixture def mktemp()", "a copy of the License at http://opensource.org/licenses/MIT * Unless required by applicable law", "community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. * Copyright (C) 2017-2021 TH<NAME>, a", "License is distributed on * an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "files = [] def core(content: Optional[str] = None): filepath = Path(_tempfile_.mktemp()) files.append(filepath) if", "express or implied. See the License for the * specific language governing permissions", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License", "* Licensed under the MIT License (the \"License\"); you may not use this", "the * specific language governing permissions and limitations under the License. \"\"\" import", "You may obtain a copy of the License at http://opensource.org/licenses/MIT * Unless required", "distributed under the License is distributed on * an \"AS IS\" BASIS, WITHOUT", "on * an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. * Copyright (C) 2017-2021 TH<NAME>, a Tencent company.", "the License. 
* You may obtain a copy of the License at http://opensource.org/licenses/MIT", "from typing import Callable, Generator, Optional import pytest @pytest.fixture def mktemp() -> Generator[Callable[...,", "is distributed on * an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "limitations under the License. \"\"\" import tempfile as _tempfile_ from pathlib import Path", "law or agreed to in writing, software distributed under the License is distributed", "License. \"\"\" import tempfile as _tempfile_ from pathlib import Path from typing import", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "mode=\"w\") as fh: fh.write(content) return filepath yield core for item in files: if", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "the MIT License (the \"License\"); you may not use this file except in", "Tencent company. All rights reserved. * Licensed under the MIT License (the \"License\");", "import pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path], None, None]: files = []", "at http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to in writing,", "def mktemp() -> Generator[Callable[..., Path], None, None]: files = [] def core(content: Optional[str]", "compliance with the License. * You may obtain a copy of the License", "under the MIT License (the \"License\"); you may not use this file except", "the License for the * specific language governing permissions and limitations under the", "* You may obtain a copy of the License at http://opensource.org/licenses/MIT * Unless", "License for the * specific language governing permissions and limitations under the License.", "TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-蓝鲸 PaaS", "with the License. 
* You may obtain a copy of the License at", "\"\"\" * TencentBlueKing is pleased to support the open source community by making", "OF ANY KIND, either express or implied. See the License for the *", "Optional import pytest @pytest.fixture def mktemp() -> Generator[Callable[..., Path], None, None]: files =", "tempfile as _tempfile_ from pathlib import Path from typing import Callable, Generator, Optional", "_tempfile_ from pathlib import Path from typing import Callable, Generator, Optional import pytest", "file except in compliance with the License. * You may obtain a copy", "or implied. See the License for the * specific language governing permissions and", "* TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-蓝鲸", "or agreed to in writing, software distributed under the License is distributed on", "蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. * Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All", "fh: fh.write(content) return filepath yield core for item in files: if item.exists: item.unlink()", "and limitations under the License. \"\"\" import tempfile as _tempfile_ from pathlib import", "See the License for the * specific language governing permissions and limitations under", "either express or implied. See the License for the * specific language governing", "software distributed under the License is distributed on * an \"AS IS\" BASIS,", "Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved. * Licensed under", "files.append(filepath) if content: with open(filepath, mode=\"w\") as fh: fh.write(content) return filepath yield core", "in compliance with the License. * You may obtain a copy of the", "required by applicable law or agreed to in writing, software distributed under the", "by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available. 
* Copyright (C) 2017-2021 TH<NAME>, a Tencent", "= Path(_tempfile_.mktemp()) files.append(filepath) if content: with open(filepath, mode=\"w\") as fh: fh.write(content) return filepath", "-*- \"\"\" * TencentBlueKing is pleased to support the open source community by", "coding: utf-8 -*- \"\"\" * TencentBlueKing is pleased to support the open source" ]
[ "path from hackerhub import views app_name = 'hackerhub' urlpatterns = [ # path('hackathons/',", "import views app_name = 'hackerhub' urlpatterns = [ # path('hackathons/', views.hackathonList, name='hackathonList'), ]", "from django.urls import path from hackerhub import views app_name = 'hackerhub' urlpatterns =", "django.urls import path from hackerhub import views app_name = 'hackerhub' urlpatterns = [", "from hackerhub import views app_name = 'hackerhub' urlpatterns = [ # path('hackathons/', views.hackathonList,", "import path from hackerhub import views app_name = 'hackerhub' urlpatterns = [ #", "hackerhub import views app_name = 'hackerhub' urlpatterns = [ # path('hackathons/', views.hackathonList, name='hackathonList')," ]
[ "uniform grid (with L = 1) variable. Assumes that the field is split", "returns div X = x_dx + y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) +", "left slice of local proc as right slab to follow proc rightSlice =", "= None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as", "as we do pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1]", "of a 3-dimensional, real space, uniform grid (with L = 1) variable. Assumes", "[ z_dy - y_dz, x_dz - z_dx, y_dx - x_dy ] \"\"\" return", "def MPIrotX(comm,X): \"\"\" returns curl X = [ z_dy - y_dz, x_dz -", "Args: comm -- MPI world communicator var -- input field dim -- axis", "= tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del tmp return", "i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def", "in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X):", "= (rank - n_proc[1]) % (n_proc[0] * n_proc[1]) # send right slice of", "- y_dz, x_dz - z_dx, y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) -", "dim -- axis along the derivative should be taken \"\"\" rank = comm.Get_rank()", "tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del tmp return np.array((p1 - m1)/ds) def", "= comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim", "+ z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns", "- n_proc[1]) % (n_proc[0] * n_proc[1]) # send right slice of local proc", "+ MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\" res", "slice(2,None,None) sl_c = 
slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis 2 covers entire", "(rank // n_proc[1]) * n_proc[1] # send right slice of local proc as", "def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0)", "proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:]", "X = x_dx + y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) +", "div X = x_dx + y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1)", "X = [ z_dy - y_dz, x_dz - z_dx, y_dx - x_dy ]", "follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of", "(n_proc[0] * n_proc[1]) prev_proc = (rank - n_proc[1]) % (n_proc[0] * n_proc[1]) #", "slice of local proc as left slab to follow proc leftSlice = None", "* n_proc[1]) # send right slice of local proc as left slab to", "+ X[1] * MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\"", "None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right", "tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim == 1): next_proc = (rank + 1)", "X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X .", "def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . 
grad) Y \"\"\" res = np.zeros_like(X) for", "i in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) +", "m1 = tmp[:,sl_m1,:] elif (dim == 2): # nothing special required here as", "= np.zeros_like(X) for i in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1]", "= None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 =", "% (n_proc[0] * n_proc[1]) prev_proc = (rank - n_proc[1]) % (n_proc[0] * n_proc[1])", "as left slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) #", "comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim ==", "(rank + 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] prev_proc =", "taken \"\"\" rank = comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 =", "np import FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite difference) of", "z_dx, y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2) - MPIderiv2(comm,X[2],0),", "= (rank - 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] #", "required here as we do pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1", "first derivative (2-point central finite difference) of a 3-dimensional, real space, uniform grid", "\"\"\" res = np.zeros_like(Y) for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1)", "\"\"\" returns (X . 
grad) Y \"\"\" res = np.zeros_like(X) for i in", "slice of local proc as right slab to follow proc rightSlice = None", "right slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp =", "world communicator var -- input field dim -- axis along the derivative should", "np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl X = [ z_dy", "MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns div X = x_dx + y_dy", "FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite difference) of a 3-dimensional,", "Assumes that the field is split on axis 0 between processes. Args: comm", "(with L = 1) variable. Assumes that the field is split on axis", "np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del", "dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N // loc_slc if (dim == 0):", "rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1", "- z_dx, y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2) -", "= comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c =", "1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] prev_proc = (rank -", "0): next_proc = (rank + n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc = (rank", "n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc = (rank - n_proc[1]) % (n_proc[0] *", "-- MPI world communicator var -- input field dim -- axis along the", "= [ x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def", "N // loc_slc if (dim == 0): next_proc = (rank + n_proc[1]) %", "slice(None,-2,None) sl_p1 = 
slice(2,None,None) sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis", "def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\" res = np.zeros_like(Y) for i", "p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim == 2): # nothing special", "the derivative should be taken \"\"\" rank = comm.Get_rank() size = comm.Get_size() sl_m1", "decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else:", "slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left", "(rank + n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc = (rank - n_proc[1]) %", "MPI world communicator var -- input field dim -- axis along the derivative", "+ X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X", "return X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def", "MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\" res =", "== 1): next_proc = (rank + 1) % n_proc[1] + (rank // n_proc[1])", "z_dy - y_dz, x_dz - z_dx, y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1)", "right slice of local proc as left slab to follow proc leftSlice =", "print(\"watch out for dimension!\") del tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\"", "return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i", "] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl X", "communicator var -- input field dim -- axis along the derivative should be", "MPIdivX(comm,X): \"\"\" returns div X = x_dx + y_dy + z_dz \"\"\" return", "= tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim == 2): # nothing special required", ". 
grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) +", "m1 = tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del tmp return np.array((p1 -", "split on axis 0 between processes. Args: comm -- MPI world communicator var", "MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite difference) of a 3-dimensional, real space,", "X = [ x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ])", "= tmp[:,sl_m1,:] elif (dim == 2): # nothing special required here as we", "# send right slice of local proc as left slab to follow proc", "to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1", "MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X):", "# send left slice of local proc as right slab to follow proc", "assumes axis 2 covers entire grid with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(),", "(rank // n_proc[1]) * n_proc[1] prev_proc = (rank - 1) % n_proc[1] +", "None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right", "1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N // loc_slc", "* MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\"", "follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of", "tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del tmp return np.array((p1", "MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . 
grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) +", "* MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns div", "\"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j", "y_dz, x_dz - z_dx, y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2),", "def MPIgradX(comm,X): \"\"\" returns grad X = [ x_dx, y_dy, z_dz ] \"\"\"", "tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim == 1):", "(2-point central finite difference) of a 3-dimensional, real space, uniform grid (with L", "difference) of a 3-dimensional, real space, uniform grid (with L = 1) variable.", "* n_proc[1]) prev_proc = (rank - n_proc[1]) % (n_proc[0] * n_proc[1]) # send", "# nothing special required here as we do pencil decomp in x-y tmp", "+ X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns div X =", "returns (X . grad) Y \"\"\" res = np.zeros_like(X) for i in range(3):", "to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1", "(X . grad) Y \"\"\" res = np.zeros_like(X) for i in range(3): res[i]", "= np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim == 2): #", "proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local", "+ (rank // n_proc[1]) * n_proc[1] prev_proc = (rank - 1) % n_proc[1]", "curl X = [ z_dy - y_dz, x_dz - z_dx, y_dx - x_dy", "= slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes", "return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . 
grad) Y \"\"\"", "+ 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] prev_proc = (rank", "= None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as", "= 2.0/float(var.shape[2]) # assumes axis 2 covers entire grid with L = 1", "- 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] # send right", "for i in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1)", "comm -- MPI world communicator var -- input field dim -- axis along", "n_proc[1]) % (n_proc[0] * n_proc[1]) # send right slice of local proc as", "as left slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) #", "tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . grad) Y", "prev_proc = (rank - n_proc[1]) % (n_proc[0] * n_proc[1]) # send right slice", "* n_proc[1] prev_proc = (rank - 1) % n_proc[1] + (rank // n_proc[1])", "MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\"", "n_proc[1]) # send right slice of local proc as left slab to follow", "\"\"\" returns pd_j X_j Y_i \"\"\" res = np.zeros_like(Y) for i in range(3):", "size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c = slice(1,-1,None) ds", "\"\"\" rank = comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None)", "y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\"", "+ MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns grad X =", "2 covers entire grid with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc", "n_proc[1] # send right slice of local proc as left slab to follow", "res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2] * 
MPIderiv2(comm,Y[i],2))", "leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local proc", "of local proc as right slab to follow proc rightSlice = None rightSlice", "of local proc as left slab to follow proc leftSlice = None leftSlice", "1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] # send right slice", "send left slice of local proc as right slab to follow proc rightSlice", "== 2): # nothing special required here as we do pencil decomp in", "grad X = [ x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2),", "= MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns grad", "x_dz - z_dx, y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2)", "input field dim -- axis along the derivative should be taken \"\"\" rank", "np.zeros_like(X) for i in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1] *", "= FFTHelperFuncs.local_shape n_proc = N // loc_slc if (dim == 0): next_proc =", "= (rank + 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] prev_proc", "field dim -- axis along the derivative should be taken \"\"\" rank =", "np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N // loc_slc if (dim ==", "right slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp =", "loc_slc = FFTHelperFuncs.local_shape n_proc = N // loc_slc if (dim == 0): next_proc", "del tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . 
grad)", "2.0/float(var.shape[2]) # assumes axis 2 covers entire grid with L = 1 N", "grid with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc", "pd_j X_j Y_i \"\"\" res = np.zeros_like(Y) for i in range(3): res[i] =", "with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc =", "entire grid with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape", "import numpy as np import FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central", "n_proc[1] + (rank // n_proc[1]) * n_proc[1] # send right slice of local", "= (rank + n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc = (rank - n_proc[1])", "\"\"\" returns grad X = [ x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0),", "n_proc[1]) * n_proc[1] prev_proc = (rank - 1) % n_proc[1] + (rank //", "to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice", "(n_proc[0] * n_proc[1]) # send right slice of local proc as left slab", "comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c = slice(1,-1,None)", "MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns grad X = [", "(rank - n_proc[1]) % (n_proc[0] * n_proc[1]) # send right slice of local", "(dim == 1): next_proc = (rank + 1) % n_proc[1] + (rank //", "= None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 =", "n_proc[1] prev_proc = (rank - 1) % n_proc[1] + (rank // n_proc[1]) *", "Y_i \"\"\" res = np.zeros_like(Y) for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) +", "(rank - 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1] # send", "local proc as left slab to follow proc 
leftSlice = None leftSlice =", "tmp[sl_m1,:,:] elif (dim == 1): next_proc = (rank + 1) % n_proc[1] +", "]) def MPIrotX(comm,X): \"\"\" returns curl X = [ z_dy - y_dz, x_dz", "prev_proc = (rank - 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1]", "tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim == 2):", "rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif", "# assumes axis 2 covers entire grid with L = 1 N =", "(X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res", "MPIrotX(comm,X): \"\"\" returns curl X = [ z_dy - y_dz, x_dz - z_dx,", "for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res", "y_dx - x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2) - MPIderiv2(comm,X[2],0), MPIderiv2(comm,X[1],0)", "m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\" return X[0] *", "comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2])", "grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2]", "left slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send", "np.zeros_like(Y) for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return", "Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2] *", "space, uniform grid (with L = 1) variable. 
Assumes that the field is", "here as we do pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 =", "proc as left slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc)", "= 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N //", "= np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out for dimension!\")", "field is split on axis 0 between processes. Args: comm -- MPI world", "grad) Y \"\"\" res = np.zeros_like(X) for i in range(3): res[i] = (X[0]", "X[1] * MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns", "MPIXdotGradY(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\" res = np.zeros_like(X) for i", "return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl X = [", "tmp[:,sl_m1,:] elif (dim == 2): # nothing special required here as we do", "leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right slab", "- x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2) - MPIderiv2(comm,X[2],0), MPIderiv2(comm,X[1],0) -", "// n_proc[1]) * n_proc[1] # send right slice of local proc as left", "n_proc = N // loc_slc if (dim == 0): next_proc = (rank +", "np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim == 1): next_proc =", "MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . grad) Y", "MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns div X", "processes. 
Args: comm -- MPI world communicator var -- input field dim --", "* MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . grad)", "dimension!\") del tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X .", "n_proc[1]) * n_proc[1] # send right slice of local proc as left slab", "left slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send", "returns (X . grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1] *", "out for dimension!\") del tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns", "p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim == 1): next_proc = (rank", "X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y):", "y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns", "between processes. Args: comm -- MPI world communicator var -- input field dim", "return res def MPIgradX(comm,X): \"\"\" returns grad X = [ x_dx, y_dy, z_dz", "range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\"", "1) variable. 
Assumes that the field is split on axis 0 between processes.", "p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del tmp", "= comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right slab to", "to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice", "loc_slc if (dim == 0): next_proc = (rank + n_proc[1]) % (n_proc[0] *", "leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of local proc", "<gh_stars>1-10 import numpy as np import FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point", "do pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 =", "X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\" res", "slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0)", "rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif", "sl_p1 = slice(2,None,None) sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis 2", "finite difference) of a 3-dimensional, real space, uniform grid (with L = 1)", "% (n_proc[0] * n_proc[1]) # send right slice of local proc as left", "[ x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X):", "(dim == 0): next_proc = (rank + n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc", "as np import FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite difference)", "on axis 0 between processes. 
Args: comm -- MPI world communicator var --", "axis 2 covers entire grid with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int)", "be taken \"\"\" rank = comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1", "range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2] *", "- m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\" return X[0]", "proc as left slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc)", "// n_proc[1]) * n_proc[1] prev_proc = (rank - 1) % n_proc[1] + (rank", "m1 = tmp[sl_m1,:,:] elif (dim == 1): next_proc = (rank + 1) %", "tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim == 2): # nothing special required here", "MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\" res = np.zeros_like(Y) for", "Y \"\"\" res = np.zeros_like(X) for i in range(3): res[i] = (X[0] *", "proc as right slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc)", "+ n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc = (rank - n_proc[1]) % (n_proc[0]", "variable. Assumes that the field is split on axis 0 between processes. Args:", "numpy as np import FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite", "proc as right slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc)", "None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:]", "elif (dim == 1): next_proc = (rank + 1) % n_proc[1] + (rank", "np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X . 
grad) Y \"\"\" return", "proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:]", "sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis 2 covers entire grid", "next_proc = (rank + n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc = (rank -", "def MPIdivX(comm,X): \"\"\" returns div X = x_dx + y_dy + z_dz \"\"\"", "\"\"\" returns curl X = [ z_dy - y_dz, x_dz - z_dx, y_dx", "MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns grad X", "else: print(\"watch out for dimension!\") del tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y):", "= tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim == 1): next_proc = (rank +", ". grad) Y \"\"\" res = np.zeros_like(X) for i in range(3): res[i] =", "as right slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp", "* MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . 
grad) Y \"\"\" res =", "\"\"\" returns div X = x_dx + y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0)", "= comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right slab to", "slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis 2 covers entire grid with L", "+ MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\" res = np.zeros_like(Y)", "= np.zeros_like(Y) for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2)", "-- input field dim -- axis along the derivative should be taken \"\"\"", "] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2) - MPIderiv2(comm,X[2],0), MPIderiv2(comm,X[1],0) - MPIderiv2(comm,X[0],1), ])", "x_dy ] \"\"\" return np.array([MPIderiv2(comm,X[2],1) - MPIderiv2(comm,X[1],2), MPIderiv2(comm,X[0],2) - MPIderiv2(comm,X[2],0), MPIderiv2(comm,X[1],0) - MPIderiv2(comm,X[0],1),", "L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N", "derivative (2-point central finite difference) of a 3-dimensional, real space, uniform grid (with", "None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:]", "\"\"\"Returns first derivative (2-point central finite difference) of a 3-dimensional, real space, uniform", "2): # nothing special required here as we do pencil decomp in x-y", "= [ z_dy - y_dz, x_dz - z_dx, y_dx - x_dy ] \"\"\"", "x_dx + y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def", "* n_proc[1] # send right slice of local proc as left slab to", "= np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N // loc_slc if (dim", "axis 0 between processes. 
Args: comm -- MPI world communicator var -- input", "+ X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\"", "local proc as right slab to follow proc rightSlice = None rightSlice =", "= 1) variable. Assumes that the field is split on axis 0 between", "we do pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1", "(X . grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1)", "X_j Y_i \"\"\" res = np.zeros_like(Y) for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0)", "3-dimensional, real space, uniform grid (with L = 1) variable. Assumes that the", "MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl X = [ z_dy -", "1): next_proc = (rank + 1) % n_proc[1] + (rank // n_proc[1]) *", "+ MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns grad X = [ x_dx,", "along the derivative should be taken \"\"\" rank = comm.Get_rank() size = comm.Get_size()", "= (X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return", "* MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2] * MPIderiv2(comm,Y[i],2)) return res def", "n_proc[1]) prev_proc = (rank - n_proc[1]) % (n_proc[0] * n_proc[1]) # send right", "(dim == 2): # nothing special required here as we do pencil decomp", "MPIgradX(comm,X): \"\"\" returns grad X = [ x_dx, y_dy, z_dz ] \"\"\" return", "res def MPIdivX(comm,X): \"\"\" returns div X = x_dx + y_dy + z_dz", "z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl", "MPIdivXY(comm,X,Y): \"\"\" returns pd_j X_j Y_i \"\"\" res = np.zeros_like(Y) for i in", "* MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns div X = x_dx +", "+ y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y):", "L 
= 1) variable. Assumes that the field is split on axis 0", "// loc_slc if (dim == 0): next_proc = (rank + n_proc[1]) % (n_proc[0]", "for dimension!\") del tmp return np.array((p1 - m1)/ds) def MPIXdotGradYScalar(comm,X,Y): \"\"\" returns (X", "should be taken \"\"\" rank = comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None)", "def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite difference) of a 3-dimensional, real", "0 between processes. Args: comm -- MPI world communicator var -- input field", "axis along the derivative should be taken \"\"\" rank = comm.Get_rank() size =", "x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out", "x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\"", "n_proc[1] + (rank // n_proc[1]) * n_proc[1] prev_proc = (rank - 1) %", "that the field is split on axis 0 between processes. Args: comm --", "\"\"\" res = np.zeros_like(X) for i in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0)", "follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 =", "res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) + MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns", "== 0): next_proc = (rank + n_proc[1]) % (n_proc[0] * n_proc[1]) prev_proc =", "the field is split on axis 0 between processes. 
Args: comm -- MPI", "rank = comm.Get_rank() size = comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c", "slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1)", "returns pd_j X_j Y_i \"\"\" res = np.zeros_like(Y) for i in range(3): res[i]", "comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right slab to follow", "= comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim", "MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns", "sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2]) #", "res def MPIgradX(comm,X): \"\"\" returns grad X = [ x_dx, y_dy, z_dz ]", "= N // loc_slc if (dim == 0): next_proc = (rank + n_proc[1])", "res = np.zeros_like(Y) for i in range(3): res[i] = MPIderiv2(comm,X[0]*Y[i],0) + MPIderiv2(comm,X[1]*Y[i],1) +", "% n_proc[1] + (rank // n_proc[1]) * n_proc[1] prev_proc = (rank - 1)", "in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch", "= np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim == 1): next_proc", "grid (with L = 1) variable. 
Assumes that the field is split on", "\"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1), MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl X =", "next_proc = (rank + 1) % n_proc[1] + (rank // n_proc[1]) * n_proc[1]", "returns curl X = [ z_dy - y_dz, x_dz - z_dx, y_dx -", "np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1 = tmp[:,sl_m1,:] elif (dim == 2): # nothing", "as right slab to follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp", "special required here as we do pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2)", "elif (dim == 2): # nothing special required here as we do pencil", "= tmp[:,:,sl_m1] else: print(\"watch out for dimension!\") del tmp return np.array((p1 - m1)/ds)", "= slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis 2 covers entire grid with", "\"\"\" returns (X . grad) Y \"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1]", "import FFTHelperFuncs def MPIderiv2(comm,var,dim): \"\"\"Returns first derivative (2-point central finite difference) of a", "covers entire grid with L = 1 N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc =", "pencil decomp in x-y tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1]", "% n_proc[1] + (rank // n_proc[1]) * n_proc[1] # send right slice of", "MPIderiv2(comm,X,2), ]) def MPIrotX(comm,X): \"\"\" returns curl X = [ z_dy - y_dz,", "+ (rank // n_proc[1]) * n_proc[1] # send right slice of local proc", "= slice(2,None,None) sl_c = slice(1,-1,None) ds = 2.0/float(var.shape[2]) # assumes axis 2 covers", "MPIderiv2(comm,X[2]*Y[i],2) return res def MPIgradX(comm,X): \"\"\" returns grad X = [ x_dx, y_dy,", "leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of local proc as right slab", "comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send 
left slice of local proc as right slab to follow", "follow proc rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 =", "-- axis along the derivative should be taken \"\"\" rank = comm.Get_rank() size", "in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) + X[1] * MPIderiv2(comm,Y[i],1) + X[2]", "N = np.array(FFTHelperFuncs.FFT.global_shape(), dtype=int) loc_slc = FFTHelperFuncs.local_shape n_proc = N // loc_slc if", "\"\"\" return X[0] * MPIderiv2(comm,Y,0) + X[1] * MPIderiv2(comm,Y,1) + X[2] * MPIderiv2(comm,Y,2)", "tmp = np.concatenate((var[:,:,-1:],var,var[:,:,:1]),axis=2) p1 = tmp[:,:,sl_p1] m1 = tmp[:,:,sl_m1] else: print(\"watch out for", "a 3-dimensional, real space, uniform grid (with L = 1) variable. Assumes that", "derivative should be taken \"\"\" rank = comm.Get_rank() size = comm.Get_size() sl_m1 =", "send right slice of local proc as left slab to follow proc leftSlice", "returns grad X = [ x_dx, y_dy, z_dz ] \"\"\" return np.array([MPIderiv2(comm,X,0), MPIderiv2(comm,X,1),", "comm.sendrecv(sendobj=var[:1,:,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=0) p1 = tmp[sl_p1,:,:] m1 = tmp[sl_m1,:,:] elif (dim ==", "central finite difference) of a 3-dimensional, real space, uniform grid (with L =", "var -- input field dim -- axis along the derivative should be taken", "proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[:,-1:,:],dest=next_proc,source=prev_proc) # send left slice of local", "FFTHelperFuncs.local_shape n_proc = N // loc_slc if (dim == 0): next_proc = (rank", "X[2] * MPIderiv2(comm,Y[i],2)) return res def MPIdivX(comm,X): \"\"\" returns div X = x_dx", "res = np.zeros_like(X) for i in range(3): res[i] = (X[0] * MPIderiv2(comm,Y[i],0) +", "nothing special required here as we do pencil decomp in x-y tmp =", "= comm.Get_size() sl_m1 = slice(None,-2,None) sl_p1 = slice(2,None,None) sl_c = 
slice(1,-1,None) ds =", "= tmp[sl_m1,:,:] elif (dim == 1): next_proc = (rank + 1) % n_proc[1]", "MPIderiv2(comm,Y,2) def MPIXdotGradY(comm,X,Y): \"\"\" returns (X . grad) Y \"\"\" res = np.zeros_like(X)", "real space, uniform grid (with L = 1) variable. Assumes that the field", "slab to follow proc leftSlice = None leftSlice = comm.sendrecv(sendobj=var[-1:,:,:],dest=next_proc,source=prev_proc) # send left", "is split on axis 0 between processes. Args: comm -- MPI world communicator", "z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2) def MPIdivXY(comm,X,Y): \"\"\" returns pd_j", "rightSlice = None rightSlice = comm.sendrecv(sendobj=var[:,:1,:],dest=prev_proc,source=next_proc) tmp = np.concatenate((leftSlice,var,rightSlice),axis=1) p1 = tmp[:,sl_p1,:] m1", "ds = 2.0/float(var.shape[2]) # assumes axis 2 covers entire grid with L =", "if (dim == 0): next_proc = (rank + n_proc[1]) % (n_proc[0] * n_proc[1])", "return res def MPIdivX(comm,X): \"\"\" returns div X = x_dx + y_dy +", "= x_dx + y_dy + z_dz \"\"\" return MPIderiv2(comm,X[0],0) + MPIderiv2(comm,X[1],1) + MPIderiv2(comm,X[2],2)" ]
[ "1. Print row 1 (remember, indexing starts at zero) of A. print(A[0,:]) #", "Then, reshape B to make it look like A again. C = B.reshape((4,4))", "you do B = np.ravel(A). B = np.ravel(A) print(B) # 2. Look of", "# Create vector b b = np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print", "# Practice 1 # Generate array of 0 to 10 my_ar1 = np.arange(0,11,dtype='float')", "that A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to compute", "# Compute diameter from area diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice 3", "the values of every entry in A that is greater than 2. print(A[A", "units.\"\"\" # Compute diameter from area diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice", "np # Practice 1 # Generate array of 0 to 10 my_ar1 =", "\\n',AT) # 4. Use np.linalg.inv() to compute the inverse of A. AInv =", "# 2. Look of the documentation for np.reshape(). Then, reshape B to make", "print(B) # 2. Look of the documentation for np.reshape(). Then, reshape B to", "= np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7],", "# 1. Print row 1 (remember, indexing starts at zero) of A. print(A[0,:])", "# 4. Use np.linalg.inv() to compute the inverse of A. AInv = np.linalg.inv(A)", "2018 <NAME> # Practice with NumPy import numpy as np # Practice 1", "4. Use np.linalg.inv() to compute the inverse of A. AInv = np.linalg.inv(A) print('Inverse", "Now do np.dot(A, x) to verify that A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b))", "Practice with NumPy import numpy as np # Practice 1 # Generate array", "4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) # Create vector b b = np.array([1.1,", "2.3, 3.3, 3.9]) # 1. Print row 1 (remember, indexing starts at zero)", "b = np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print row 1 (remember, indexing", "columns 1 and 3 of A. print(A[:,(0,2)]) # 3. Print the values of", "# 2. Now do np.dot(A, x) to verify that A⋅x=bA⋅x=b . 
b1 =", "of 0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) #", "entry in A that is greater than 2. print(A[A > 2]) # 4.", "Print columns 1 and 3 of A. print(A[:,(0,2)]) # 3. Print the values", "1 and 3 of A. print(A[:,(0,2)]) # 3. Print the values of every", "of every entry in A that is greater than 2. print(A[A > 2])", "np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to compute the transpose of A. AT", "NumPy import numpy as np # Practice 1 # Generate array of 0", "= np.sqrt((4*xa)/np.pi) return diameter # Practice 3 # Create matrix A A =", "= b is x = ',x) # 2. Now do np.dot(A, x) to", "Generate array of 0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float')", "np.reshape(). Then, reshape B to make it look like A again. C =", "3.3, 3.9]) # 1. Print row 1 (remember, indexing starts at zero) of", "return diameter # Practice 3 # Create matrix A A = np.array([[6.7, 1.3,", "10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 #", "Create matrix A A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4],", "with NumPy import numpy as np # Practice 1 # Generate array of", "np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load in data", "2. Print columns 1 and 3 of A. print(A[:,(0,2)]) # 3. Print the", "verify that A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to", "3 of A. print(A[:,(0,2)]) # 3. Print the values of every entry in", "AT = np.transpose(A) print('Transpose of A is AT = \\n',AT) # 4. Use", "3. 
Print the values of every entry in A that is greater than", "from area diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice 3 # Create matrix", "diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice 3 # Create matrix A A", "print('Transpose of A is AT = \\n',AT) # 4. Use np.linalg.inv() to compute", "to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2", "= np.ravel(A). B = np.ravel(A) print(B) # 2. Look of the documentation for", "3 # Create matrix A A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5,", "array of 0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2)", "the documentation for np.reshape(). Then, reshape B to make it look like A", "xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array of", "numpy as np # Practice 1 # Generate array of 0 to 10", "Compute diameter from area diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice 3 #", "linear system A⋅x=bA⋅x=b . # Try it out: use np.linalg.solve(). # Store your", "is AInv = \\n',AInv) # 1. See what happens when you do B", "# Practice with NumPy import numpy as np # Practice 1 # Generate", "np.diag() function. print(np.diag(A)) # 1. First, we'll solve the linear system A⋅x=bA⋅x=b .", "A. print(A[:,(0,2)]) # 3. Print the values of every entry in A that", "\"\"\" Convert an array of cross-sectional areas to diameters with commensurate units.\"\"\" #", "your answer in the Numpy array x. x = np.linalg.solve(A,b) print('Solution of A*x", "to compute the transpose of A. AT = np.transpose(A) print('Transpose of A is", "is AT = \\n',AT) # 4. Use np.linalg.inv() to compute the inverse of", "0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4,", "Look of the documentation for np.reshape(). 
Then, reshape B to make it look", "diameters with commensurate units.\"\"\" # Compute diameter from area diameter = np.sqrt((4*xa)/np.pi) return", "diameter # Practice 3 # Create matrix A A = np.array([[6.7, 1.3, 0.6,", "= \\n',AInv) # 1. See what happens when you do B = np.ravel(A).", "with commensurate units.\"\"\" # Compute diameter from area diameter = np.sqrt((4*xa)/np.pi) return diameter", "print(A[:,(0,2)]) # 3. Print the values of every entry in A that is", "it out: use np.linalg.solve(). # Store your answer in the Numpy array x.", "documentation for np.reshape(). Then, reshape B to make it look like A again.", "0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) # Create vector b b =", "of cross-sectional areas to diameters with commensurate units.\"\"\" # Compute diameter from area", "in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an", "in A that is greater than 2. print(A[A > 2]) # 4. Print", "2. Now do np.dot(A, x) to verify that A⋅x=bA⋅x=b . b1 = np.dot(A,x)", "First, we'll solve the linear system A⋅x=bA⋅x=b . # Try it out: use", "= np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional areas to diameters", "0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) # Create vector", "what happens when you do B = np.ravel(A). B = np.ravel(A) print(B) #", "we'll solve the linear system A⋅x=bA⋅x=b . # Try it out: use np.linalg.solve().", "[0.0, 1.5, 3.4, 7.5]]) # Create vector b b = np.array([1.1, 2.3, 3.3,", "print('Solution of A*x = b is x = ',x) # 2. Now do", "of A is AT = \\n',AT) # 4. Use np.linalg.inv() to compute the", "print('Inverse of A is AInv = \\n',AInv) # 1. See what happens when", "of A. AT = np.transpose(A) print('Transpose of A is AT = \\n',AT) #", "AInv = \\n',AInv) # 1. See what happens when you do B =", "# 1. First, we'll solve the linear system A⋅x=bA⋅x=b . 
# Try it", "array x. x = np.linalg.solve(A,b) print('Solution of A*x = b is x =", "transpose of A. AT = np.transpose(A) print('Transpose of A is AT = \\n',AT)", "is x = ',x) # 2. Now do np.dot(A, x) to verify that", "= np.linalg.inv(A) print('Inverse of A is AInv = \\n',AInv) # 1. See what", "np.sqrt((4*xa)/np.pi) return diameter # Practice 3 # Create matrix A A = np.array([[6.7,", "np.linalg.inv(A) print('Inverse of A is AInv = \\n',AInv) # 1. See what happens", "reshape B to make it look like A again. C = B.reshape((4,4)) print(C)", "zero) of A. print(A[0,:]) # 2. Print columns 1 and 3 of A.", "1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5,", "np.linalg.solve(A,b) print('Solution of A*x = b is x = ',x) # 2. Now", "the inverse of A. AInv = np.linalg.inv(A) print('Inverse of A is AInv =", "\\n',AInv) # 1. See what happens when you do B = np.ravel(A). B", "the np.diag() function. print(np.diag(A)) # 1. First, we'll solve the linear system A⋅x=bA⋅x=b", "np.linalg.solve(). # Store your answer in the Numpy array x. x = np.linalg.solve(A,b)", "the Numpy array x. x = np.linalg.solve(A,b) print('Solution of A*x = b is", "2 # Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa):", "in the Numpy array x. x = np.linalg.solve(A,b) print('Solution of A*x = b", "values of every entry in A that is greater than 2. print(A[A >", "system A⋅x=bA⋅x=b . # Try it out: use np.linalg.solve(). # Store your answer", "when you do B = np.ravel(A). B = np.ravel(A) print(B) # 2. Look", "# 1. See what happens when you do B = np.ravel(A). B =", "using the np.diag() function. print(np.diag(A)) # 1. First, we'll solve the linear system", "<NAME> # Practice with NumPy import numpy as np # Practice 1 #", "print(my_ar2) # Practice 2 # Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low =", "Store your answer in the Numpy array x. 
x = np.linalg.solve(A,b) print('Solution of", "array of cross-sectional areas to diameters with commensurate units.\"\"\" # Compute diameter from", "area diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice 3 # Create matrix A", "b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to compute the transpose of", "vector b b = np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print row 1", "print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load in data xa_high", "def xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional areas to diameters with commensurate", "= np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to compute the transpose of A.", "of A. print(A[:,(0,2)]) # 3. Print the values of every entry in A", "A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5,", "solve the linear system A⋅x=bA⋅x=b . # Try it out: use np.linalg.solve(). #", "# Create matrix A A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4,", "diameter from area diameter = np.sqrt((4*xa)/np.pi) return diameter # Practice 3 # Create", "A. using the np.diag() function. print(np.diag(A)) # 1. First, we'll solve the linear", "compute the transpose of A. AT = np.transpose(A) print('Transpose of A is AT", "answer in the Numpy array x. x = np.linalg.solve(A,b) print('Solution of A*x =", "Create vector b b = np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print row", "do B = np.ravel(A). B = np.ravel(A) print(B) # 2. Look of the", "# Practice 2 # Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#')", "A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to compute the", "of A. using the np.diag() function. print(np.diag(A)) # 1. First, we'll solve the", "starts at zero) of A. print(A[0,:]) # 2. Print columns 1 and 3", "as np # Practice 1 # Generate array of 0 to 10 my_ar1", "print(A[A > 2]) # 4. Print the diagonal of A. 
using the np.diag()", "happens when you do B = np.ravel(A). B = np.ravel(A) print(B) # 2.", "2. Look of the documentation for np.reshape(). Then, reshape B to make it", "np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low", "AInv = np.linalg.inv(A) print('Inverse of A is AInv = \\n',AInv) # 1. See", "A. AInv = np.linalg.inv(A) print('Inverse of A is AInv = \\n',AInv) # 1.", "np.dot(A, x) to verify that A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3.", "np.linalg.inv() to compute the inverse of A. AInv = np.linalg.inv(A) print('Inverse of A", "B = np.ravel(A) print(B) # 2. Look of the documentation for np.reshape(). Then,", "b is x = ',x) # 2. Now do np.dot(A, x) to verify", "Practice 2 # Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def", "# Practice 3 # Create matrix A A = np.array([[6.7, 1.3, 0.6, 0.7],", "compute the inverse of A. AInv = np.linalg.inv(A) print('Inverse of A is AInv", "of A. AInv = np.linalg.inv(A) print('Inverse of A is AInv = \\n',AInv) #", "commensurate units.\"\"\" # Compute diameter from area diameter = np.sqrt((4*xa)/np.pi) return diameter #", "of A. print(A[0,:]) # 2. Print columns 1 and 3 of A. print(A[:,(0,2)])", ". # Try it out: use np.linalg.solve(). # Store your answer in the", "= np.linalg.solve(A,b) print('Solution of A*x = b is x = ',x) # 2.", "Use np.linalg.inv() to compute the inverse of A. AInv = np.linalg.inv(A) print('Inverse of", "data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array", "xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional areas to diameters with commensurate units.\"\"\"", "= ',x) # 2. 
Now do np.dot(A, x) to verify that A⋅x=bA⋅x=b .", "February 2018 <NAME> # Practice with NumPy import numpy as np # Practice", "use np.linalg.solve(). # Store your answer in the Numpy array x. x =", "xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional areas to", "# Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\"", "A. AT = np.transpose(A) print('Transpose of A is AT = \\n',AT) # 4.", "at zero) of A. print(A[0,:]) # 2. Print columns 1 and 3 of", "np.transpose(A) print('Transpose of A is AT = \\n',AT) # 4. Use np.linalg.inv() to", "AT = \\n',AT) # 4. Use np.linalg.inv() to compute the inverse of A.", "A A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8,", "# Try it out: use np.linalg.solve(). # Store your answer in the Numpy", "print(np.diag(A)) # 1. First, we'll solve the linear system A⋅x=bA⋅x=b . # Try", "x. x = np.linalg.solve(A,b) print('Solution of A*x = b is x = ',x)", "Use np.transpose() to compute the transpose of A. AT = np.transpose(A) print('Transpose of", "A is AT = \\n',AT) # 4. Use np.linalg.inv() to compute the inverse", "= np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load in", "> 2]) # 4. Print the diagonal of A. using the np.diag() function.", "1.5, 3.4, 7.5]]) # Create vector b b = np.array([1.1, 2.3, 3.3, 3.9])", "2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) # Create vector b", "0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]])", "of A*x = b is x = ',x) # 2. Now do np.dot(A,", "for np.reshape(). Then, reshape B to make it look like A again. C", "[1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) # Create vector b b", "Numpy array x. 
x = np.linalg.solve(A,b) print('Solution of A*x = b is x", "1 # Generate array of 0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2", "2. print(A[A > 2]) # 4. Print the diagonal of A. using the", "and 3 of A. print(A[:,(0,2)]) # 3. Print the values of every entry", "the transpose of A. AT = np.transpose(A) print('Transpose of A is AT =", "that is greater than 2. print(A[A > 2]) # 4. Print the diagonal", "1.7], [0.0, 1.5, 3.4, 7.5]]) # Create vector b b = np.array([1.1, 2.3,", "x = ',x) # 2. Now do np.dot(A, x) to verify that A⋅x=bA⋅x=b", "print(np.isclose(b1,b)) # 3. Use np.transpose() to compute the transpose of A. AT =", "Practice 1 # Generate array of 0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1)", "diagonal of A. using the np.diag() function. print(np.diag(A)) # 1. First, we'll solve", "np.transpose() to compute the transpose of A. AT = np.transpose(A) print('Transpose of A", "import numpy as np # Practice 1 # Generate array of 0 to", "A that is greater than 2. print(A[A > 2]) # 4. Print the", "3.4, 7.5]]) # Create vector b b = np.array([1.1, 2.3, 3.3, 3.9]) #", "indexing starts at zero) of A. print(A[0,:]) # 2. Print columns 1 and", "# 3. Use np.transpose() to compute the transpose of A. AT = np.transpose(A)", "# 3. Print the values of every entry in A that is greater", "7.5]]) # Create vector b b = np.array([1.1, 2.3, 3.3, 3.9]) # 1.", "# 2. Print columns 1 and 3 of A. print(A[:,(0,2)]) # 3. Print", ". b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose() to compute the transpose", "x) to verify that A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use", "<reponame>MiroGasparek/python_intro # 21 February 2018 <NAME> # Practice with NumPy import numpy as", "0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice", "Try it out: use np.linalg.solve(). # Store your answer in the Numpy array", "A*x = b is x = ',x) # 2. 
Now do np.dot(A, x)", "A is AInv = \\n',AInv) # 1. See what happens when you do", "Print row 1 (remember, indexing starts at zero) of A. print(A[0,:]) # 2.", "21 February 2018 <NAME> # Practice with NumPy import numpy as np #", "= np.ravel(A) print(B) # 2. Look of the documentation for np.reshape(). Then, reshape", "Convert an array of cross-sectional areas to diameters with commensurate units.\"\"\" # Compute", "an array of cross-sectional areas to diameters with commensurate units.\"\"\" # Compute diameter", "# Store your answer in the Numpy array x. x = np.linalg.solve(A,b) print('Solution", "to verify that A⋅x=bA⋅x=b . b1 = np.dot(A,x) print(np.isclose(b1,b)) # 3. Use np.transpose()", "3. Use np.transpose() to compute the transpose of A. AT = np.transpose(A) print('Transpose", "= np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print row 1 (remember, indexing starts", "inverse of A. AInv = np.linalg.inv(A) print('Inverse of A is AInv = \\n',AInv)", "Practice 3 # Create matrix A A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1,", "1. First, we'll solve the linear system A⋅x=bA⋅x=b . # Try it out:", "function. print(np.diag(A)) # 1. First, we'll solve the linear system A⋅x=bA⋅x=b . #", "= \\n',AT) # 4. Use np.linalg.inv() to compute the inverse of A. AInv", "= np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#')", "the diagonal of A. using the np.diag() function. print(np.diag(A)) # 1. First, we'll", "x = np.linalg.solve(A,b) print('Solution of A*x = b is x = ',x) #", "of the documentation for np.reshape(). Then, reshape B to make it look like", "b b = np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print row 1 (remember,", "of A is AInv = \\n',AInv) # 1. See what happens when you", "my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load in data xa_high =", "(remember, indexing starts at zero) of A. print(A[0,:]) # 2. Print columns 1", "do np.dot(A, x) to verify that A⋅x=bA⋅x=b . 
b1 = np.dot(A,x) print(np.isclose(b1,b)) #", "print(A[0,:]) # 2. Print columns 1 and 3 of A. print(A[:,(0,2)]) # 3.", "np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional areas", "np.ravel(A). B = np.ravel(A) print(B) # 2. Look of the documentation for np.reshape().", "my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 = np.linspace(0,10,11,dtype='float') print(my_ar2) # Practice 2 # Load", "A. print(A[0,:]) # 2. Print columns 1 and 3 of A. print(A[:,(0,2)]) #", "to diameters with commensurate units.\"\"\" # Compute diameter from area diameter = np.sqrt((4*xa)/np.pi)", "np.array([1.1, 2.3, 3.3, 3.9]) # 1. Print row 1 (remember, indexing starts at", "Print the values of every entry in A that is greater than 2.", "2]) # 4. Print the diagonal of A. using the np.diag() function. print(np.diag(A))", "# 4. Print the diagonal of A. using the np.diag() function. print(np.diag(A)) #", "See what happens when you do B = np.ravel(A). B = np.ravel(A) print(B)", "np.ravel(A) print(B) # 2. Look of the documentation for np.reshape(). Then, reshape B", "out: use np.linalg.solve(). # Store your answer in the Numpy array x. x", "cross-sectional areas to diameters with commensurate units.\"\"\" # Compute diameter from area diameter", "5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) # Create", "the linear system A⋅x=bA⋅x=b . # Try it out: use np.linalg.solve(). # Store", "row 1 (remember, indexing starts at zero) of A. print(A[0,:]) # 2. Print", "to compute the inverse of A. AInv = np.linalg.inv(A) print('Inverse of A is", "# Generate array of 0 to 10 my_ar1 = np.arange(0,11,dtype='float') print(my_ar1) my_ar2 =", "than 2. print(A[A > 2]) # 4. Print the diagonal of A. using", "1. See what happens when you do B = np.ravel(A). B = np.ravel(A)", "B = np.ravel(A). B = np.ravel(A) print(B) # 2. 
Look of the documentation", "areas to diameters with commensurate units.\"\"\" # Compute diameter from area diameter =", "np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0,", "3.9]) # 1. Print row 1 (remember, indexing starts at zero) of A.", "= np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional", "np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert an array of cross-sectional areas to diameters with", "# 21 February 2018 <NAME> # Practice with NumPy import numpy as np", "A⋅x=bA⋅x=b . # Try it out: use np.linalg.solve(). # Store your answer in", "1 (remember, indexing starts at zero) of A. print(A[0,:]) # 2. Print columns", "= np.transpose(A) print('Transpose of A is AT = \\n',AT) # 4. Use np.linalg.inv()", "is greater than 2. print(A[A > 2]) # 4. Print the diagonal of", "Load in data xa_high = np.loadtxt('data/xa_high_food.csv',comments='#') xa_low = np.loadtxt('data/xa_low_food.csv',comments='#') def xa_to_diameter(xa): \"\"\" Convert", "greater than 2. print(A[A > 2]) # 4. Print the diagonal of A.", "Print the diagonal of A. using the np.diag() function. print(np.diag(A)) # 1. First,", "',x) # 2. Now do np.dot(A, x) to verify that A⋅x=bA⋅x=b . b1", "[0.1, 5.5, 0.4, 2.4], [1.1, 0.8, 4.5, 1.7], [0.0, 1.5, 3.4, 7.5]]) #", "matrix A A = np.array([[6.7, 1.3, 0.6, 0.7], [0.1, 5.5, 0.4, 2.4], [1.1,", "every entry in A that is greater than 2. print(A[A > 2]) #", "4. Print the diagonal of A. using the np.diag() function. print(np.diag(A)) # 1." ]
[ "not len(f.read())<2: for l in f.read(): sessions.append(l) f = open('.sessions', 'w') r='' for", "f.read(): sessions.append(l) f = open('.sessions', 'w') r='' for s in sessions:r+=s f.write(r) f.close()", "display_name, conn): self.display_name = display_name self.conn = conn self.id = conn.id def load_sessions():", "sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not len(f.read())<2: for l in", "not len(f.read())<2: for l in f.read(): sessions.append(l) f = open(fn, 'w') r='' for", "= display_name self.conn = conn self.id = conn.id def load_sessions(): if os.path.exists('.sessions') and", "= conn self.id = conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[]", "sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn):", "else: f=open(fn,'w+') f.close() return [] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as", "f=open(fn,'w+') f.close() return [] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f:", "f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f = open(fn, 'w')", "f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return", "f.close() return [] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if", "'w') r='' for s in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions = 
[f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}']", "def __init__(self, display_name, conn): self.display_name = display_name self.conn = conn self.id = conn.id", "sessions else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r')", "conn): self.display_name = display_name self.conn = conn self.id = conn.id def load_sessions(): if", "l in f.read(): sessions.append(l) f = open(fn, 'w') r='' for s in sessions:r+=s", "len(f.read())<2: for l in f.read(): sessions.append(l) f = open(fn, 'w') r='' for s", "for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close() return", "def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not len(f.read())<2: for", "f = open('.sessions', 'w') r='' for s in sessions:r+=s f.write(r) f.close() def save_(self,fn):", "if len(sess)<2: return sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions", "= conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2:", "sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if", "sessions=[] if len(f.read())<2:return sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions", "sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close() return [] def save(self): sessions =", "os.path.isfile('.sessions'): f=open('.sessions','r') 
sessions=[] sess=f.read() if len(sess)<2: return sessions for l in sess.split('\\n'): opts=l.split(':')", "def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return sessions", "def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not len(f.read())<2: for", "return sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+')", "in f.read(): sessions.append(l) f = open(fn, 'w') r='' for s in sessions:r+=s f.write(r)", "save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not len(f.read())<2: for l", "os from .Connection import * class Session: def __init__(self, display_name, conn): self.display_name =", "if len(f.read())<2:return sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else:", "display_name self.conn = conn self.id = conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'):", "load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l in", "open('.sessions','r') as f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f =", "as f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f = open(fn,", "self.id = conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if", "sessions.append(l) f = open('.sessions', 'w') r='' for s in sessions:r+=s f.write(r) f.close() def", "= 
[f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not len(f.read())<2: for l in f.read():", "for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close() return", "sessions=[] sess=f.read() if len(sess)<2: return sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close()", "l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close() return []", "in f.read(): sessions.append(l) f = open('.sessions', 'w') r='' for s in sessions:r+=s f.write(r)", "r='' for s in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with", "def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l", "with open('.sessions','r') as f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f", "f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not len(f.read())<2:", "in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f:", "return sessions else: f=open(fn,'w+') f.close() return [] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with", "and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return sessions for l in sess.split('\\n'):", 
"sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if os.path.exists(fn)", "opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close() return [] def save(self): sessions", "len(sess)<2: return sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else:", "conn self.id = conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read()", "class Session: def __init__(self, display_name, conn): self.display_name = display_name self.conn = conn self.id", "[f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not len(f.read())<2: for l in f.read(): sessions.append(l)", "return [] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions", "for l in f.read(): sessions.append(l) f = open('.sessions', 'w') r='' for s in", "f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close() return [] def save(self):", "and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3])))", "sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close()", "for s in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r')", 
"= [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not len(f.read())<2: for l in f.read():", "self.display_name = display_name self.conn = conn self.id = conn.id def load_sessions(): if os.path.exists('.sessions')", "self.conn = conn self.id = conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r')", "f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3]))))", "for l in f.read(): sessions.append(l) f = open(fn, 'w') r='' for s in", "return [] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not", "Session: def __init__(self, display_name, conn): self.display_name = display_name self.conn = conn self.id =", "= open('.sessions', 'w') r='' for s in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions", "[f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not len(f.read())<2: for l in f.read(): sessions.append(l)", "os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return sessions for l in", "[] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for", "import * class Session: def __init__(self, display_name, conn): self.display_name = display_name self.conn =", "in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close() return [] def", "<filename>core/Sessions/sessions.py import pickle import os from .Connection import * class Session: def __init__(self,", "l in f.read(): 
opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+') f.close() return []", "* class Session: def __init__(self, display_name, conn): self.display_name = display_name self.conn = conn", "f.close() return [] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return", "f.close() return sessions else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if os.path.exists(fn) and", "f.read(): sessions.append(l) f = open(fn, 'w') r='' for s in sessions:r+=s f.write(r) f.close()", "f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if", "pickle import os from .Connection import * class Session: def __init__(self, display_name, conn):", "len(f.read())<2:return sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close() return sessions else: f=open(fn,'w+')", "f.write(r) f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not", "if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return sessions for l", "os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l in f.read(): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],opts[2],opts[3]))) f.close()", "__init__(self, display_name, conn): self.display_name = display_name self.conn = conn self.id = conn.id def", "open(fn,'r') as f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f =", "from .Connection import * class Session: def __init__(self, display_name, conn): self.display_name = display_name", "sessions else: f=open(fn,'w+') f.close() return [] def save(self): sessions 
= [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r')", "load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return sessions for", "import pickle import os from .Connection import * class Session: def __init__(self, display_name,", "conn.id def load_sessions(): if os.path.exists('.sessions') and os.path.isfile('.sessions'): f=open('.sessions','r') sessions=[] sess=f.read() if len(sess)<2: return", "sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close()", "sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not len(f.read())<2: for l in", "return sessions else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn):", ".Connection import * class Session: def __init__(self, display_name, conn): self.display_name = display_name self.conn", "f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f = open('.sessions', 'w')", "[] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open('.sessions','r') as f: if not len(f.read())<2:", "len(f.read())<2: for l in f.read(): sessions.append(l) f = open('.sessions', 'w') r='' for s", "if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l in f.read():", "with open(fn,'r') as f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f", "as f: if not len(f.read())<2: for l in f.read(): sessions.append(l) f = open('.sessions',", "open('.sessions', 'w') r='' for s in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions =", "opts=l.split(':') 
sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if", "import os from .Connection import * class Session: def __init__(self, display_name, conn): self.display_name", "else: f=open('.sessions','w+') f.close() return [] def load_sessions_(fn): if os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[]", "if not len(f.read())<2: for l in f.read(): sessions.append(l) f = open('.sessions', 'w') r=''", "s in sessions:r+=s f.write(r) f.close() def save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as", "save_(self,fn): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}'] with open(fn,'r') as f: if not len(f.read())<2: for l", "l in f.read(): sessions.append(l) f = open('.sessions', 'w') r='' for s in sessions:r+=s", "if not len(f.read())<2: for l in f.read(): sessions.append(l) f = open(fn, 'w') r=''", "os.path.exists(fn) and os.path.isfile(fn): f=open(fn,'r') sessions=[] if len(f.read())<2:return sessions for l in f.read(): opts=l.split(':')", "sess=f.read() if len(sess)<2: return sessions for l in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return", "in sess.split('\\n'): opts=l.split(':') sessions.append(Session(opts[0],Connection(opts[1],int(opts[2]),int(opts[3])))) f.close() return sessions else: f=open('.sessions','w+') f.close() return [] def", "f.close() return sessions else: f=open(fn,'w+') f.close() return [] def save(self): sessions = [f'{self.display_name}:{self.conn.bind_ip}:{self.conn.bind_port}:{self.conn.id}']" ]
[ "blender_async.open_file_dialog() print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results) await sleep(1) loop =", "= bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1) file_name", "bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name) await", "Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def example(): await", "async def example(): await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name) await sleep(1) results", "import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\")", "def example(): await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name) await sleep(1) results =", "= bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name)", "my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\")", "await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results) await sleep(1) loop = blender_async.get_event_loop() loop.create_task(example())", "bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1) file_name =", "= bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async", "bpy import asyncio from asyncio import Task, coroutine, 
sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog):", "import asyncio from asyncio import Task, coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float", "my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1)", "my_string = bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1) file_name = await blender_async.open_file_dialog()", "asyncio import Task, coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating", "print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results) await sleep(1) loop = blender_async.get_event_loop()", "import bpy import asyncio from asyncio import Task, coroutine, sleep import blender_async class", "await blender_async.open_file_dialog() print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results) await sleep(1) loop", "Task, coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool", "await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog)", "blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string", "= await blender_async.open_file_dialog() print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results) await sleep(1)", "from asyncio import Task, coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some", 
"TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String", "asyncio from asyncio import Task, coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float =", "file_name = await blender_async.open_file_dialog() print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results) await", "coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool =", "import Task, coroutine, sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\")", "class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string =", "Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def example(): await sleep(1) file_name = await", "example(): await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name) await sleep(1) results = await", "sleep import blender_async class TestDialog(blender_async.dialogs.AsyncDialog): my_float = bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle", "bpy.props.FloatProperty(name=\"Some Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def", "Value\") async def example(): await sleep(1) file_name = await blender_async.open_file_dialog() print(file_name) await sleep(1)", "Floating Point\") my_bool = bpy.props.BoolProperty(name=\"Toggle Option\") my_string = bpy.props.StringProperty(name=\"String Value\") async def example():", "sleep(1) file_name = 
await blender_async.open_file_dialog() print(file_name) await sleep(1) results = await blender_async.open_dialog(TestDialog) print(results)" ]
[ "#col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks =", "# Import packages import numpy as np import pandas as pd from scipy.stats", "Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code", "= banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se", "= banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0])", "here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values = loan_groupby.mean() print(mean_values) # code", "here # -------------- # code starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum()", "here # -------------- # code starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']]", "# code starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values = loan_groupby.mean()", "ends here # -------------- # code starts here self_emp_y = banks['Self_Employed'] == 'Yes'", "banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends", "banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] == 'No'", "who are not self-employed is: \",percentage_nse) # code ends here # -------------- #", "ends here # -------------- # code starts here loan_groupby = 
banks.groupby('Loan_Status') loan_groupby =", "bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: #", "Loan_Status = 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1]", ",' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100", "\",percentage_se) print(\"Percent of Loan approval for people who are not self-employed is: \",percentage_nse)", "# code starts here # code ends here # -------------- # code starts", "# -------------- # code starts here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status =", "= 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se", "',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent", "'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status =", "code starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col", "here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n =", "employed people is : \",percentage_se) print(\"Percent of Loan approval for people who are", "banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values:", "= (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100", "# banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) 
banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here", "= pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here # -------------- # code", "(loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval for", "== 'No' Loan_Status = 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n", "-------------- # code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term", "are not self-employed is: \",percentage_nse) # code ends here # -------------- # code", "aggfunc=np.mean) # code ends here # -------------- # code starts here self_emp_y =", "Loan approval for Self employed people is : \",percentage_se) print(\"Percent of Loan approval", "here # -------------- # code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12)", "-------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean)", "code starts here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] == 'Y'", "ends here # -------------- # code starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns", "banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here # -------------- # Code starts here banks[['Gender','Married',", "starts here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n", "= (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval", "Loan approval for people who are not self-employed is: \",percentage_nse) # code ends", "people who are not self-employed is: 
\",percentage_nse) # code ends here # --------------", "100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval for Self employed", "& loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse =", "as np import pandas as pd from scipy.stats import mode bank = pd.read_csv(path)", "of Loan approval for people who are not self-employed is: \",percentage_nse) # code", "Import packages import numpy as np import pandas as pd from scipy.stats import", "banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se =", "x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here # -------------- # Code starts here", "== 'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status", "banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True)", "starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] #", "print(\"Percent of Loan approval for Self employed people is : \",percentage_se) print(\"Percent of", "# code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term =", "print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts here # code ends here", "bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:]", "as pd from scipy.stats import mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') 
print(categorical_var)", "= banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values = loan_groupby.mean() print(mean_values) # code ends here", "banks[loan_term>=25].shape[0] # code ends here # -------------- # code starts here loan_groupby =", "print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) *", "= banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse", "# code starts here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] ==", "(loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval for Self employed people is :", "not self-employed is: \",percentage_nse) # code ends here # -------------- # code starts", "-------------- # code starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values =", "loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status)", "percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan", "self_emp_y = banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed']", "loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse,", "loan_status = banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status = 614", "banks.isnull().sum() #banks.isnull().sum() #code ends here # -------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']]", "\",percentage_nse) # code 
ends here # -------------- # code starts here loan_term =", "from scipy.stats import mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var =", "banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum()", "ends here # -------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks,", "in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code", "-------------- # Import packages import numpy as np import pandas as pd from", "* 100 print(\"Percent of Loan approval for Self employed people is : \",percentage_se)", "bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code", "is : \",percentage_se) print(\"Percent of Loan approval for people who are not self-employed", "pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here # -------------- # code starts", "index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here # -------------- # code starts here self_emp_y", "#code ends here # -------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount =", "= pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts", "scipy.stats import mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = 
bank.select_dtypes(include='number')", "here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns)", "here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here", "avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here # -------------- #", "ends here # -------------- # code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x :", "-------------- # code starts here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status']", "# -------------- # code starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode", "614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,'", "pd from scipy.stats import mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var", "code starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values = loan_groupby.mean() print(mean_values)", "categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts here #", "= (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status)", "= banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here # -------------- #", "people is : \",percentage_se) print(\"Percent of Loan 
approval for people who are not", "(self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse", "starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col =", "here # -------------- # code starts here self_emp_y = banks['Self_Employed'] == 'Yes' loan_status", "of Loan approval for Self employed people is : \",percentage_se) print(\"Percent of Loan", "= bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for", "= banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends here", "loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends", "pandas as pd from scipy.stats import mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object')", "= (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval for Self employed people is", "here # code ends here # -------------- # code starts here bank.columns banks", "code ends here # -------------- # code starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby", "starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values = loan_groupby.mean() print(mean_values) #", "#for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum()", "import mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var)", 
"banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here # -------------- # Code", "# -------------- # Import packages import numpy as np import pandas as pd", "import numpy as np import pandas as pd from scipy.stats import mode bank", "bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True))", "print(numerical_var) # code starts here # code ends here # -------------- # code", "banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in", "pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts here", "bank.select_dtypes(include='number') print(numerical_var) # code starts here # code ends here # -------------- #", "-------------- # code starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode =", "mode bank = pd.read_csv(path) categorical_var = bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) #", "x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here # -------------- # Code starts", "loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status)", "is: \",percentage_nse) # code ends here # -------------- # code starts here loan_term", "here # -------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, 
values='LoanAmount',", ": \",percentage_se) print(\"Percent of Loan approval for people who are not self-employed is:", "numpy as np import pandas as pd from scipy.stats import mode bank =", "'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se = (self_emp_y &", "& loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se =", "'No' Loan_Status = 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n &", "code ends here # -------------- # code starts here self_emp_y = banks['Self_Employed'] ==", "packages import numpy as np import pandas as pd from scipy.stats import mode", "self-employed is: \",percentage_nse) # code ends here # -------------- # code starts here", "code ends here # -------------- # code starts here bank.columns banks = bank.drop('Loan_ID',axis=1)", "numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts here # code ends here #", "== 'Y' self_emp_n = banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se = (self_emp_y", "percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval for Self employed people", "# code starts here bank.columns banks = bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0)", "# -------------- # code starts here loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values", "for Self employed people is : \",percentage_se) print(\"Percent of Loan approval for people", "banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends here #", "values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here # -------------- # code starts here", "100 print(\"Percent of Loan approval 
for Self employed people is : \",percentage_se) print(\"Percent", "# code ends here # -------------- # code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda", "code ends here # -------------- # code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x", "starts here # code ends here # -------------- # code starts here bank.columns", "big_loan_term = banks[loan_term>=25].shape[0] # code ends here # -------------- # code starts here", "* 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of Loan approval for Self", "self_emp_n = banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1]", "##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here # --------------", "banks['Self_Employed'] == 'No' Loan_Status = 614 loan_approved_se = (self_emp_y & loan_status).value_counts()[1] loan_approved_nse =", "Self employed people is : \",percentage_se) print(\"Percent of Loan approval for people who", "approval for people who are not self-employed is: \",percentage_nse) # code ends here", "list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x:", "import pandas as pd from scipy.stats import mode bank = pd.read_csv(path) categorical_var =", "loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se = (loan_approved_se/Loan_Status) *", ": x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends here # -------------- #", "x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends here # -------------- # code", "# Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = 
pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) #", "# code ends here # -------------- # code starts here loan_groupby = banks.groupby('Loan_Status')", "# code ends here # -------------- # code starts here self_emp_y = banks['Self_Employed']", "np import pandas as pd from scipy.stats import mode bank = pd.read_csv(path) categorical_var", "# -------------- # code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25", "approval for Self employed people is : \",percentage_se) print(\"Percent of Loan approval for", "= banks['Self_Employed'] == 'Yes' loan_status = banks['Loan_Status'] == 'Y' self_emp_n = banks['Self_Employed'] ==", "loan_groupby = banks.groupby('Loan_Status') loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']] mean_values = loan_groupby.mean() print(mean_values) # code ends", "print(\"Percent of Loan approval for people who are not self-employed is: \",percentage_nse) #", "x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum()", "x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends here # --------------", "#banks.isnull().sum() #code ends here # -------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount", "'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here # --------------", "bank.drop('Loan_ID',axis=1) banks.columns banks.isnull().sum() bank_mode = banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x", "here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code", 
"loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0] # code ends here # -------------- # code starts", "# -------------- # Code starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'],", "(self_emp_y & loan_status).value_counts()[1] loan_approved_nse = (self_emp_n & loan_status).value_counts()[1] print(loan_approved_se ,' ',loan_approved_nse, Loan_Status) percentage_se", "starts here banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends", "= bank.select_dtypes(include='number') print(numerical_var) # code starts here # code ends here # --------------", "Loan_Status) percentage_se = (loan_approved_se/Loan_Status) * 100 percentage_nse = (loan_approved_nse/Loan_Status) * 100 print(\"Percent of", "for people who are not self-employed is: \",percentage_nse) # code ends here #", "= banks[loan_term>=25].shape[0] # code ends here # -------------- # code starts here loan_groupby", "# code ends here # -------------- # code starts here bank.columns banks =", "= list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda", "= bank.select_dtypes(include='object') print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts here # code", "code starts here loan_term = banks['Loan_Amount_Term'].apply(lambda x : x/12) loan_term>=25 big_loan_term = banks[loan_term>=25].shape[0]", "banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks = banks[col].apply(lambda x: x.fillna(x.mode,inplace=True)) banks.fillna(bank_mode.loc[0,:],inplace=True) banks.isnull().sum() #banks.isnull().sum() #code ends here #", "bank.select_dtypes(include='object') 
print(categorical_var) numerical_var = bank.select_dtypes(include='number') print(numerical_var) # code starts here # code ends", "banks[['Gender','Married', 'Self_Employed','LoanAmount']] avg_loan_amount = pd.pivot_table(banks, values='LoanAmount', index=['Gender','Married','Self_Employed'], aggfunc=np.mean) # code ends here #", "code starts here # code ends here # -------------- # code starts here", "banks.mode(axis=0) #col = list(banks.columns) bank_mode.loc[0,:] banks.isnull().sum() #for x in banks.columns.values: # banks[x]=banks[x].fillna(value=bank_mode[x].loc[0]) ##banks" ]
[ "for loop to go from 1 -10 random_number = random.randint(1, 6) # ranint", "pick between 1 and 6 print(random_number) # print the # that was saved", "random_number = random.randint(1, 6) # ranint pick between 1 and 6 print(random_number) #", "6) # ranint pick between 1 and 6 print(random_number) # print the #", "from 1 -10 random_number = random.randint(1, 6) # ranint pick between 1 and", "go from 1 -10 random_number = random.randint(1, 6) # ranint pick between 1", "between 1 and 6 print(random_number) # print the # that was saved to", "to go from 1 -10 random_number = random.randint(1, 6) # ranint pick between", "and 6 print(random_number) # print the # that was saved to variable random_number", "# import random module for x in range(1,11): # for loop to go", "ranint pick between 1 and 6 print(random_number) # print the # that was", "import random module for x in range(1,11): # for loop to go from", "loop to go from 1 -10 random_number = random.randint(1, 6) # ranint pick", "#03_01_dice import random # import random module for x in range(1,11): # for", "random # import random module for x in range(1,11): # for loop to", "= random.randint(1, 6) # ranint pick between 1 and 6 print(random_number) # print", "1 and 6 print(random_number) # print the # that was saved to variable", "1 -10 random_number = random.randint(1, 6) # ranint pick between 1 and 6", "in range(1,11): # for loop to go from 1 -10 random_number = random.randint(1,", "# ranint pick between 1 and 6 print(random_number) # print the # that", "random.randint(1, 6) # ranint pick between 1 and 6 print(random_number) # print the", "range(1,11): # for loop to go from 1 -10 random_number = random.randint(1, 6)", "x in range(1,11): # for loop to go from 1 -10 random_number =", "for x in range(1,11): # for loop to go from 1 -10 random_number", "import random # import random module for x in range(1,11): # for loop", "module for x in range(1,11): # for loop to go from 1 -10", "random module for x in range(1,11): 
# for loop to go from 1", "# for loop to go from 1 -10 random_number = random.randint(1, 6) #", "-10 random_number = random.randint(1, 6) # ranint pick between 1 and 6 print(random_number)" ]
[ "out return inner def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path:", "!= None: print(\"Atempting to process {} from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name,", "as config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name =", "Path(file).__str__() for file in ignored_paths ] } return config def create_project(config_path, name, pdb_path,", "if Path(file_path).suffix == \".json\": new_file_path = self.json_path / Path(file_path).name if self.json_links.get(name) is not", "loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could not be loaded or", "self.ignore_links.get(name) def load_protein(self, name, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path", "not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for file in self.ignore_links if", "def list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name def", "P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) elif raw_data", "not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try:", "get_protein(self, name): try: if self.ignore_links.get(name) is None: return self.proteins[name] else: return None except:", "ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: json.dump(config, file) class Project: def __init__(self, config_path):", "\"pdb_links\": self.pdb_links, \"json_links\": self.json_links, 
\"ignore_links\": self.ignore_links } return config def _update_links(self): for pdb_name", "self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links", "def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e =", "not in self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val, Exception): return val self.proteins[id]", "self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if", "files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects =", "self.ignored_paths } return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def", "[Path(file) for file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file", "exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in RCSB database\".format(name))", "id, file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return", "exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path)))", "{} from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound:", "\".json\": 
P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P test_code = '''class Project:", "raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority ==", "pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return", "else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else:", "def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self, id, file_path): try:", "at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\")))", "self.ignore_links[id] = True return None else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def", "perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\": name,", "self.ignore_links[id] = False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return", "CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0", "self.json_links.pop(json_name, None) for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None)", "generate_default_ids(self): return [f.stem if f not in self.list_ignored() else \"\" for f in", "config def 
add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path =", "data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff)", "proteins=None): if name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create new project:", "= {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id):", "'''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at", "project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load,", "get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\":", "not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not", "raise Exception(\"All jsons could not be loaded\") def get_config(self): config = { \"name\":", "json_load: print(\"Attempting to load {} from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) >", "priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and", "config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file:", "exclude_backbone=exclude_backbone, 
distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: json.dump(config, file) class Project: def", "data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name,", "= CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) >", "self.ignore_links.get(name) is None: return self.proteins[name] else: return None except: return Exception(\"{} not loaded", "data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name)) try: P", "self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def", "{ \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff,", "priority == \"json\": return self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name) else: return", "providided\") def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with", "def get_name(self): return self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path def", "config def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name,", "Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) 
self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def", "self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}):", "\"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def _update_links(self):", "or handled\") def load_all_json(self, ids): try: for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id,", "return val self.proteins[id] = val self.ignore_links[id] = False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path,", "= { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\":", "self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path", "import json from shutil import copyfile from time import perf_counter def get_config(name, pdb_path,", "config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if", "else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else: raise Exception(\"Not a", "name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = 
Path.cwd() / Path(\"{}_config.json\".format(name)) config =", "Exception(e) if len(P.residues) > 0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{}", "else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority", "exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0 and Path(file_path).suffix", "self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links", "not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def", "self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not", "elif len(file_path) > 0: print(\"Atempting to process {} from directly from pdb file\".format(name))", "Path(file_path) not in self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val, Exception): return val", "None) self.ignore_links = [Path(file) for file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if", "return self.ignored_paths def _init_protein(self, id, file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except:", "project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins is not", "json_name in self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file)", "= self.pdb_path / Path(file_path).name 
self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a", "new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {}", "= config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if", "return config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd()", "self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def generate_default_ids(self): return [f.stem if f", "sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P", "self._init_protein(name, file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path):", "print(\"Attempting to load {} from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) > 0:", "P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues)", "file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in", "in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename): start = perf_counter() out =", "exists, cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name))", "distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: 
json.dump(config, file) class Project: def __init__(self,", "self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def get_protein(self, name): try: if self.ignore_links.get(name)", "raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config = json.load(config_file)", "config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths", "not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name)))", "self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def", "is empty\".format(P.name)) return P test_code = '''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path", "self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path =", "Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path =", "} return config def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in", "Exception(\"All jsons could not be loaded\") def get_config(self): config = { \"name\": self.name,", "self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def add_protein(self, name,", "not None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name", "/ Path(file_name) else: file_path = self.pdb_path / Path(file_name) if 
file_path.is_file(): self.proteins[name] = self._init_protein(name,", "try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if", "def get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path def", "ignored_paths): config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\":", "pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in", "self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self):", "try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if", "file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(),", "return P else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts failed for", "to load {} from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) > 0: print(\"Atempting", "self.proteins[name] else: return None except: return Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self, name,", "self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path)))", "if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files have valid", "if file_path.is_file(): if 
Path(file_path) not in self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val,", "files have valid identifier providided\") def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid", "for file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in", "not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try: if not", "print(\"{} loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could not be loaded", "project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\"", "return P else: return Exception(\"{} is empty\".format(P.name)) elif data_load and data_url is not", "== \"pdb\": if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in", "in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links:", "not loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name)", "try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0]", "taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path /", "sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in RCSB database\".format(name)) if len(P.residues) > 0:", "pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) 
config = get_config(name=name,", "get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone)", "type(out) is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\":", "= self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins is not None:", "Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file in config[\"ignored_paths\"] ]", "P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P test_code = '''class Project: def", "self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path", "except: print(\"Project {} already exists, cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path =", "def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None)", "import sys import sqlalchemy from pathlib import Path from TPP.API.centroid_protein import CentroidProtein import", "be loaded or handled\") def load_all_json(self, ids): try: for json_file, id in zip(self.json_path.iterdir(),", "def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self): return self.pdb_path def", "project, name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True,", "file_name): file_path = None if Path(file_name).suffix == \".json\": file_path = self.json_path / Path(file_name)", "as file: 
json.dump(config, file) class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path)", "self.loaded_proteins = {} def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in", "return self.distance_cutoff def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except:", "name, priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name) elif priority == \"pdb\": return", "and json_load: print(\"Attempting to load {} from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path)", "except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0 and Path(file_path).suffix !=", "file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception: print(out) else: self.projects[project_name].append(out) def", "/ Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path =", "empty\".format(P.name)) else: print(\"All processing attempts failed for {}, check provided info and try", "self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links", "\"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__()", "self.json_links.get(name) is not None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name]", "self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try: if not 
self.ignore_links.get(id):", "P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return", "else: return Exception(\"{} is empty\".format(P.name)) return P test_code = '''class Project: def __init__(self,", "from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) > 0: print(\"Atempting to process {}", "None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links", "is empty\".format(P.name)) elif raw_data != None: print(\"Atempting to process {} from raw text\".format(name))", "_init_protein(self, id, file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0]", "config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def get_protein(self, name): try: if", "and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def", "None) for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def", "get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self):", "print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs", "config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"]", "True return None else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path):", "self.projects = {} 
self.exclude_backbone = False self.distance_cutoff = 6 def add_protein(self, project_name, name,", "== \".json\": new_file_path = self.json_path / Path(file_path).name if self.json_links.get(name) is not None: raise", "config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path", "self.config_path = Path(config_path) self.proteins = {} def get_protein(self, name): try: if self.ignore_links.get(name) is", "self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True)", "None) def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with", "os import sys import sqlalchemy from pathlib import Path from TPP.API.centroid_protein import CentroidProtein", "from directly from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e", "return P test_code = '''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path)", "= False self.distance_cutoff = 6 def add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\",", "else: self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path", "self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path", "def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\": name, \"pdb_path\":", "Exception(\"{} is invalid/ignored\".format(id)) 
@_get_function_perf_decorator def load_protein(self, id, file_name): file_path = None if Path(file_name).suffix", "file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff)", "return config def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path", "if len(P.residues) > 0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is", "= self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid())", "= data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name)) try: P =", "Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path):", "{} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name) is", "return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts failed for {}, check provided", "= { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\":", "return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return", "try: if self.ignore_links.get(name) is None: return self.proteins[name] else: return None except: return Exception(\"{}", "def load_protein_json(self, project, 
name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\",", "def get_ignore_path(self): return self.ignore_path def is_mc(self): return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff", "\"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self):", "= Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home()", "print(\"Project {} created!\".format(name)) except: print(\"Project {} already exists, cancelling operation\".format(name)) def load_protein_json(self, project,", "self.json_path / Path(file_path).name if self.json_links.get(name) is not None: raise Exception(\"{} already taken by", "self.projects[name] = [] if proteins is not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name))", "load_protein(self, id, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path = self.json_path", "try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins is", "get_config(self): config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\":", "Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self): return", "len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is", "inner def _init_project(self, 
config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with", "self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self): return not", "self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name", "new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file():", "> 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name))", "file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6,", "ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val,", "self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is empty\".format(P.name)) elif data_load and data_url", "is ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could", "print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\",", "= 6 def add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out =", "in list(self.list_json()): self.json_links.pop(json_name, None) for 
ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()):", "/ Path(self.pdb_links.get(name))) else: class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins", "self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self, id, file_path):", "if f not in self.list_ignored() else \"\" for f in self.list_pdb_files()] def _get_function_perf_decorator(func):", "# self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing", "self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None)", "[f.stem if f not in self.list_ignored() else \"\" for f in self.list_pdb_files()] def", "{} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path =", "#self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) elif raw_data != None: print(\"Atempting", "\"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {} from JSON\".format(name)) return self.load_protein_json(project, name) elif", "= Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with", "except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in RCSB database\".format(name)) if len(P.residues) >", "JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) > 
0: print(\"Atempting to process {} from", "else: return None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name):", "identifier providided\") def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path)))", "Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine:", "all files have valid identifier providided\") def _init_project(self, config_path): if not Path(config_path).is_file(): raise", "self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path", "self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links", "self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name,", "file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0 and", "Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix", "list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self):", "json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name, file_path, json_load=True, data_load=True, 
data_url=\"https://files.rcsb.org/download/{}\",", "json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"])", "Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P def get_config(self):", "print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else: raise Exception(\"Not a valid {}", "= config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True)", "'''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins = {} def", "0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) else:", "config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not", "file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e)", "at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone", "} return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def 
list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self):", "\"pdb\": if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links:", "= [Path(file) for file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for", "P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine: def __init__(self): self.projects", "data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out)", "= get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: json.dump(config,", "print(end - start) return out return inner def _init_project(self, config_path): if not Path(config_path).is_file():", "P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project,", "if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {} from", "in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files", "if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try: if", "not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def 
__init__(self, config_path):", "if Path(file_path) not in self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val, Exception): return", "data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins,", "self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path),", "return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path): try: P", "does not exist in RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P)", "return P class TPP_Engine: def __init__(self): self.projects = {} def load_project(self, config_path): proj", "P def init_protein(self, project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in", "new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if", "could not be loaded or handled\") def load_all_json(self, ids): try: for json_file, id", "data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at {}\".format(Path.home()", "have valid identifier providided\") def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config", "is empty\".format(P.name)) elif data_load and data_url is not None: data_url = data_url.format(name[:4] +", "return None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name): file_path", "self.config_path = Path(config_path) self.loaded_proteins = {} def _update_links(self): for pdb_name in 
self.pdb_links: if", "could not be loaded\") def get_config(self): config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(),", "# print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else: raise Exception(\"Not a valid", "created!\".format(name)) except: print(\"Project {} already exists, cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path", "self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file in", "P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All", "centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in", "path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"]", "else: self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else:", "not in self.list_ignored() else \"\" for f in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self,", "for f in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename): start = perf_counter()", "def _init_protein(self, id, file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e =", "Exception(e) if 
len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return", "Exception(\"Not all files have valid identifier providided\") def _init_project(self, config_path): if not Path(config_path).is_file():", "self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links,", "empty\".format(P.name)) elif raw_data != None: print(\"Atempting to process {} from raw text\".format(name)) try:", "if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name)", "func(self, id, filename) end = perf_counter() print(end - start) return out return inner", "list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) >", "file_path in proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"],", "download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in RCSB database\".format(name)) if", "prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb", "print(\"Project {} already exists, cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path = self.base_lib_path", "= 
config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path =", "Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False self.distance_cutoff = 6 def add_protein(self, project_name,", "new_file_path = self.json_path / Path(file_path).name if self.json_links.get(name) is not None: raise Exception(\"{} already", "P class TPP_Engine: def __init__(self): self.projects = {} def load_project(self, config_path): proj =", "Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"]", "json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: json.dump(config, file) class Project:", "filename) end = perf_counter() print(end - start) return out return inner def _init_project(self,", "\"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\":", "{}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name]", "Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not a valid {}", "for file in ignored_paths ] } return config def create_project(config_path, name, pdb_path, json_path,", "return self.ignore_path def is_mc(self): return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self,", "\"json\": return self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name) else: 
return self.ignore_links.get(name) def", "= {} def get_protein(self, name): try: if self.ignore_links.get(name) is None: return self.proteins[name] else:", "return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path = None if Path(file_name).suffix == \".json\":", "pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\":", "None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else:", "pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file),", "list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name,", "to create new project: {}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name]", "\"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name, file_path, json_load=True,", "for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name", "self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone)", "except: raise Exception(\"All jsons could not be loaded\") def get_config(self): config = {", "= config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for", "except: raise Exception(\"All pdbs could not be loaded or 
handled\") def load_all_json(self, ids):", "RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else:", "= Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links =", "self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if", "\"\" for pdb, file_path in proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path,", "self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) elif raw_data != None:", "= centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name,", "def _get_function_perf_decorator(func): def inner(self, id, filename): start = perf_counter() out = func(self, id,", "= Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links =", "distance_cutoff=6, proteins=None): if name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create new", "Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"]", "try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) 
except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does", "False self.distance_cutoff = 6 def add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None):", "name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path / Path(file_path).name", "file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())", "empty\".format(P.name)) return P def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\":", "self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path", "file_path = self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file():", "{} def get_protein(self, name): try: if self.ignore_links.get(name) is None: return self.proteins[name] else: return", "ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could not be loaded\") def get_config(self):", "is not None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {} from", "get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\":", "Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\":", "\"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for pdb, file_path in proteins: if", "_init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0]", "= {} 
def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()):", "[ Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links = {} if not self.pdb_path.is_dir():", "else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid", "distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in ignored_paths ] } return config def", "\"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config", "Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"]", "json.dump(config, file) class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins =", "inner(self, id, filename): start = perf_counter() out = func(self, id, filename) end =", "Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file():", "test_code = '''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins =", "add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self,", "= Path(config_path) self.loaded_proteins = {} def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name)", "__init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def 
generate_default_ids(self): return [f.stem", "self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try: P =", "def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def", "self.json_links, \"ignore_links\": self.ignore_links } return config def _update_links(self): for pdb_name in self.pdb_links: if", "located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects = {}", "\"wt\") as file: json.dump(config, file) class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path =", "pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: json.dump(config, file) class", "\"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def add_protein(self,", "file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def", "id)) except: raise Exception(\"All pdbs could not be loaded or handled\") def load_all_json(self,", "def list_ignored(self): return self.ignored_paths def _init_protein(self, id, file_path): try: P = CentroidProtein(id, file_path,", "True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for pdb, file_path in proteins:", "time import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config = {", "Path(file_name) if file_path.is_file(): if Path(file_path) not in self.ignored_paths: val = self._init_protein(id, 
file_path) if", "self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins is not None: self.add_dataset(name,", "for pdb, file_path in proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"],", "\"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name)) try: project_path =", "id, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path = self.json_path /", "for file in config[\"ignored_paths\"] ] self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if", "= val self.ignore_links[id] = False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] =", "if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None):", "= centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) >", "def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def", "self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for file", "ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path):", "return P else: return Exception(\"{} is empty\".format(P.name)) 
elif raw_data != None: print(\"Atempting to", "if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path / Path(file_path).name if self.json_links.get(name)", "{} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif isinstance(val,", "\"rt\") as config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name", "None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else:", "if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir()", "return self.proteins[id] else: return None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self,", "pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val =", "if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name)", "/ Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_ignored_path(self,", "\"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\":", "{} def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class", "Path(file_path).suffix == \".json\": new_file_path = self.json_path / Path(file_path).name if 
self.json_links.get(name) is not None:", "= True return None else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self,", "_update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for", "= Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path)", "exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path,", "self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def add_protein(self, name, file_path): if", "= func(self, id, filename) end = perf_counter() print(end - start) return out return", "file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e)", "init_protein(self, project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() +", "empty\".format(P.name)) return P test_code = '''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path =", "config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links()", "P else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts failed for {},", "except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name): file_path = None", "out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), 
raw_data=raw_data) if type(out) is Exception: print(out)", "Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\": return", "ignored_paths ] } return config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]):", "sqlalchemy from pathlib import Path from TPP.API.centroid_protein import CentroidProtein import json from shutil", "return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else: raise", "self.exclude_backbone = False self.distance_cutoff = 6 def add_protein(self, project_name, name, file_path, json_load=True, data_load=True,", "id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif isinstance(val, type(None)): print(\"{}", "data_load and data_url is not None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to", "TPP_Engine: def __init__(self): self.projects = {} def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()]", "for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name", "self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def", "return self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self): return", "> 0: #raise Exception(\"Not all files have valid identifier providided\") def _init_project(self, config_path):", "= pdb def create_new_project(self, 
name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\": name", "{ \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [", "get_ignore_path(self): return self.ignore_path def is_mc(self): return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def", "!= \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P def get_config(self): config", "else: return Exception(\"{} is empty\".format(P.name)) elif data_load and data_url is not None: data_url", "if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{}", "load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name) is not None and", "#config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths)", "= config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links =", "and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P test_code", "return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self, id, file_path): try: P =", "json from shutil import copyfile from time import perf_counter def get_config(name, pdb_path, json_path,", "self.json_path = Path(config[\"json_path\"]) self.ignore_path = 
Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links", "start) return out return inner def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid", "self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir()", "= centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist", "def get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self): return not self.exclude_backbone", "file_path) if isinstance(val, Exception): return val self.proteins[id] = val self.ignore_links[id] = False return", "elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id)) except:", "_update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for", "cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P", "{}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could not be loaded or handled\") def", "return out return inner def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config", "/ Path(file_path).name if self.json_links.get(name) is not None: raise Exception(\"{} already taken by {}\".format(name,", "copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"):", "Project: def __init__(self, config_path): 
self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins = {} def _update_links(self):", "exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading", "self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path = None if Path(file_name).suffix", "if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not Path(config_path).is_file():", "def load_protein(self, name, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path =", "/ Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path) not", "import copyfile from time import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths):", "file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids,", "def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path /", "is empty\".format(P.name)) return P def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(),", "{}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path),", "return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self): return", "return P def init_protein(self, project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name", "else: class Project: def 
__init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {}", "/ Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins is not None: self.add_dataset(name, proteins)", "pdb, file_path in proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"],", "filename): start = perf_counter() out = func(self, id, filename) end = perf_counter() print(end", "def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb", "try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except:", "{} self.exclude_backbone = False self.distance_cutoff = 6 def add_protein(self, project_name, name, file_path, json_load=True,", "= proj''' old = '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing", "else: raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file,", "proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for", "Exception(\"Not a valid {} file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config = {", "top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at", "zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if", "raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {}", 
"self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self,", "def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def", "Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def get_protein(self,", "#if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0:", "sys import sqlalchemy from pathlib import Path from TPP.API.centroid_protein import CentroidProtein import json", "print(\"Atempting to process {} from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone,", "Exception(\"{} is empty\".format(P.name)) elif raw_data != None: print(\"Atempting to process {} from raw", "name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name))", "return self.proteins[name] else: return None except: return Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self,", "config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins = {} def _update_links(self): for pdb_name in", "RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{}", "return Exception(\"{} is empty\".format(P.name)) elif raw_data != None: print(\"Atempting to process {} from", "\"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def add_protein(self, name, file_path): 
if Path(file_path).is_file():", "self.list_ignored() else \"\" for f in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename):", "\"raw_data\": None}): prev_pdb = \"\" for pdb, file_path in proteins: if prev_pdb !=", "pathlib import Path from TPP.API.centroid_protein import CentroidProtein import json from shutil import copyfile", "return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else:", "from pathlib import Path from TPP.API.centroid_protein import CentroidProtein import json from shutil import", "if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix))", "config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine: def __init__(self):", "add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, json_load=json_load,", "0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is empty\".format(P.name)) elif", "id): try: if not self.ignore_links.get(id): return self.proteins[id] else: return None except: raise Exception(\"{}", "{}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone", "list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path:", "in self.pdb_links: if self.pdb_links.get(pdb_name) 
not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links:", "new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self):", "f not in self.list_ignored() else \"\" for f in self.list_pdb_files()] def _get_function_perf_decorator(func): def", "in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in self.ignore_links:", "json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if", "not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def", "self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\",", "not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in", "exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in ignored_paths ] } return", "self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def _update_links(self): for pdb_name in", "if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb =", "= self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path) not in 
self.ignored_paths: val =", "None except: return Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority", "self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links,", "file) class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {}", "Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config =", "elif priority == \"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name, file_name):", "is empty\".format(P.name)) return P class TPP_Engine: def __init__(self): self.projects = {} def load_project(self,", "{} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not", "in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could not be loaded\")", "distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone,", "exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\":", "= Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links =", "self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def __init__(self, config_path): 
self._init_project(config_path) self.config_path", "P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues)", "folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at {}\".format(Path.home() /", "self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())]", "already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path", "except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P)", "and data_url is not None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process", "val self.ignore_links[id] = False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True", "json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None):", "self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {} already exists, cancelling operation\".format(name)) def", "P def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\":", "self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self, 
id, file_path): try: P = CentroidProtein(id,", "name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name)) try:", "self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def", "= config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir():", "from TPP.API.centroid_protein import CentroidProtein import json from shutil import copyfile from time import", "def generate_default_ids(self): return [f.stem if f not in self.list_ignored() else \"\" for f", "self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except:", "valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\":", "self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path) not in self.ignored_paths: val = self._init_protein(id,", "Path(self.pdb_links.get(name))) else: class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins =", "self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file)", "print(\"Attempting to download/process {} from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load,", "config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, 
name,", "if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links()", "def load_all_json(self, ids): try: for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except:", "self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def", "if isinstance(val, Exception): return val self.proteins[id] = val self.ignore_links[id] = False return val", "not be loaded\") def get_config(self): config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\":", "not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def", "print(val) elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id))", "config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"])", "json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path,", "list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files have", "{ \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths", "file_path.is_file(): if 
Path(file_path) not in self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val, Exception):", "else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts failed for {}, check", "import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\":", "name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to", "+ \".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\",", "else: return Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine: def __init__(self): self.projects =", "empty\".format(P.name)) elif data_load and data_url is not None: data_url = data_url.format(name[:4] + \".pdb\")", "Exception(\"{} does not exist in RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) #", "return self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self,", "self.proteins[id] else: return None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id,", "and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class", "from shutil import copyfile from time import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone,", "file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e)", "if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try: if not self.ignore_links.get(id): return 
self.proteins[id]", "__init__(self): self.projects = {} def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj'''", "zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could not be loaded\") def", "Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{}", "does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does", "return config def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()):", "to download/process {} from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url)", "load {} from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) > 0: print(\"Atempting to", "{} from directly from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except:", "{}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins", "exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create", "if name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name))", "in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()):", "copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a 
valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if", "self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if", "if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file:", "= \"\" for pdb, file_path in proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb,", "/ Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def ignore_protein(self,", "None: print(\"Atempting to process {} from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\",", "self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None)", "priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name)", "def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def generate_default_ids(self): return", "self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return", "self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file)", "if not self.ignore_links.get(id): return self.proteins[id] else: return None except: raise Exception(\"{} is invalid/ignored\".format(id))", "self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = 
config[\"ignore_links\"] if", "file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if", "def get_protein(self, name): try: if self.ignore_links.get(name) is None: return self.proteins[name] else: return None", "self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return", "def get_protein(self, id): try: if not self.ignore_links.get(id): return self.proteins[id] else: return None except:", "data_url is not None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {}", "self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def _update_links(self): for", "Exception(\"{} is empty\".format(P.name)) elif data_load and data_url is not None: data_url = data_url.format(name[:4]", "in RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P", "Path(file_name).suffix == \".json\": file_path = self.json_path / Path(file_name) else: file_path = self.pdb_path /", "return self.load_protein_json(project, name) elif len(file_path) > 0: print(\"Atempting to process {} from directly", "list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name,", "add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb =", "return None except: return Exception(\"{} not loaded yet\".format(name)) def 
get_filename_from_name(self, name, priority=\"pdb\"): if", "config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff,", "ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff,", "list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files have valid identifier providided\") def _init_project(self,", "isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id)) except: raise", "list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self):", "try: if not self.ignore_links.get(id): return self.proteins[id] else: return None except: raise Exception(\"{} is", "if priority == \"pdb\": if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name)", "open(config_path, \"rt\") as config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"]", "return P def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(),", "file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path):", "len(P.residues) > 0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name))", "in self.json_links: if 
self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for", "config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"]", "file_path = None if Path(file_name).suffix == \".json\": file_path = self.json_path / Path(file_name) else:", "{} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try:", "Exception(\"All pdbs could not be loaded or handled\") def load_all_json(self, ids): try: for", "{}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path", "is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path /", "return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self, id,", "= self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception: print(out) else:", "None else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file():", "type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All", "if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try: P = CentroidProtein(name,", "Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise 
Exception(\"Not a", "Exception(\"{} is empty\".format(P.name)) return P def get_config(self): config = { \"name\": self.name, \"pdb_path\":", "P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not", "/ Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P", "already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path", "Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as", "load_protein(self, name, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path = self.json_path", "config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"]", "exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) >", "to process {} from directly from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path,", "self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try: if not self.ignore_links.get(id): return self.proteins[id] else: return", "\"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } 
return config", "jsons could not be loaded\") def get_config(self): config = { \"name\": self.name, \"pdb_path\":", "Exception(\"{} is empty\".format(P.name)) return P test_code = '''class Project: def __init__(self, config_path): self._init_project(config_path)", "raw_data != None: print(\"Atempting to process {} from raw text\".format(name)) try: P =", "config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff,", "...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif isinstance(val, type(None)):", "not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(), ids):", "self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if", "not be loaded or handled\") def load_all_json(self, ids): try: for json_file, id in", "get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self): return not self.exclude_backbone def", "for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could", "def load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name) is not None", "self.ignored_paths = [ Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links = {} if", "self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not", "= config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links =", 
"self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename): start = perf_counter() out = func(self,", "P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues)", "print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located", "name) elif len(file_path) > 0: print(\"Atempting to process {} from directly from pdb", "else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file():", "Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files", "file: json.dump(config, file) class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins", "return self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self): return not self.exclude_backbone def get_cutoff(self):", "Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files have valid identifier", "\"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links }", "def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path,", "self.load_protein_json(project, name) elif len(file_path) > 0: print(\"Atempting to process {} from directly from", "is empty\".format(P.name)) else: print(\"All processing attempts failed for {}, check provided info and", "\"pdb_path\": self.pdb_path.__str__(), 
\"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\":", "name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {} from JSON\".format(name))", "= {} def generate_default_ids(self): return [f.stem if f not in self.list_ignored() else \"\"", "__init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def get_protein(self, name): try:", "name, priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file()", "else: return Exception(\"{} is empty\".format(P.name)) return P def get_config(self): config = { \"name\":", "else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise", "return inner def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path)))", "= sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return", "not self.ignore_links.get(id): return self.proteins[id] else: return None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator", "self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links", "len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is", "does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, 
pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(),", "if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys()", "list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self):", "None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for", "a valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{}", "self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try: P", "Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path / Path(file_path).name if self.json_links.get(name) is", "} return config def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\":", "list_ignored(self): return self.ignored_paths def _init_protein(self, id, file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone)", "if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{}", "raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else:", "add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path 
/ Path(file_path).name", "import os import sys import sqlalchemy from pathlib import Path from TPP.API.centroid_protein import", "\"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for pdb, file_path in proteins: if prev_pdb", "self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not Path(config_path).is_file(): raise", "def inner(self, id, filename): start = perf_counter() out = func(self, id, filename) end", "Path(config_path) self.proteins = {} def generate_default_ids(self): return [f.stem if f not in self.list_ignored()", "{} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path", "self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return", "None}): prev_pdb = \"\" for pdb, file_path in proteins: if prev_pdb != pdb:", "_get_function_perf_decorator(func): def inner(self, id, filename): start = perf_counter() out = func(self, id, filename)", "\"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def add_protein(self, name, file_path):", "list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self,", "self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return", "loaded\") def get_config(self): config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\":", "in 
list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)])", "config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as", "if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name)", "raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) #", "json_path, exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(),", "project: {}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if", "self.ignore_links } return config def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not", "\"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in ignored_paths", "json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could not", "os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack", "val = self._init_protein(id, file_path) if isinstance(val, Exception): return val self.proteins[id] = val self.ignore_links[id]", "if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = 
[Path(file) for file in", "prev_pdb = \"\" for pdb, file_path in proteins: if prev_pdb != pdb: self.add_protein(project_name,", "if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if", "\"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path = None", "self.ignore_path def is_mc(self): return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name,", "self.proteins[id] = val self.ignore_links[id] = False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id]", "Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P test_code =", "{} already exists, cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path = self.base_lib_path /", "if Path(file_name).suffix == \".json\": file_path = self.json_path / Path(file_name) else: file_path = self.pdb_path", "{} created!\".format(name)) except: print(\"Project {} already exists, cancelling operation\".format(name)) def load_protein_json(self, project, name):", "self._init_protein(id, file_path) if isinstance(val, Exception): return val self.proteins[id] = val self.ignore_links[id] = False", "Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else:", "e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) 
# self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid()))", "TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home()", "self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could not be loaded\") def get_config(self): config", "self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path))", "from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0]", "self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links,", "Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config = json.load(config_file) self.distance_cutoff", "\"\" for f in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename): start =", "{} from JSON\".format(name)) return self.load_protein_json(project, name) elif len(file_path) > 0: print(\"Atempting to process", "raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path", "for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links =", "config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"]", 
"data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name", "Exception): return val self.proteins[id] = val self.ignore_links[id] = False return val else: #", "Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts failed for {}, check provided info", "old = '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data", "not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in", "def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path,", "def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(),", "sys.exc_info()[0] return Exception(e) if len(P.residues) > 0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else:", "file_path = self.pdb_path / Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise", "= self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): self.proteins[name]", "self.pdb_path / Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not a", "config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def generate_default_ids(self): return [f.stem if", "\"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": 
self.json_links, \"ignore_links\": self.ignore_links }", "} return config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path =", "if self.json_links.get(name) is not None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else:", "= perf_counter() print(end - start) return out return inner def _init_project(self, config_path): if", "perf_counter() print(end - start) return out return inner def _init_project(self, config_path): if not", "\"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in ignored_paths ] } return config", "#raise Exception(\"Not all files have valid identifier providided\") def _init_project(self, config_path): if not", "in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.json_path.iterdir()):", "None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else:", "return None else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if", "self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path)", "Path from TPP.API.centroid_protein import CentroidProtein import json from shutil import copyfile from time", "config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone,", "try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if", "with open(config_path, \"wt\") as file: json.dump(config, file) class Project: def 
__init__(self, config_path): self._init_project(config_path)", "raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e", "raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file, id", "shutil import copyfile from time import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff,", "> 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is empty\".format(P.name))", "self.json_links, \"ignore_links\": self.ignore_links } return config def add_protein(self, name, file_path): if Path(file_path).is_file(): if", "#self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is empty\".format(P.name)) elif data_load and data_url is", "!= \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine: def", "/ Path(file_name) if file_path.is_file(): if Path(file_path) not in self.ignored_paths: val = self._init_protein(id, file_path)", "data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to", "try: for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id))", "0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) elif", "if self.ignore_links.get(name) is None: return self.proteins[name] else: return 
None except: return Exception(\"{} not", "config[\"ignored_paths\"] ] self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True)", "= Path(config_path) self.proteins = {} def generate_default_ids(self): return [f.stem if f not in", "def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path", "len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise", "print(\"Attempting to create new project: {}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path)", "os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {} from JSON\".format(name)) return self.load_protein_json(project,", "{} from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data)", "valid {} file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config = { \"name\": self.name,", "load_protein_json(self, project, name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\",", "valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name)", "else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path))", "+ \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {} from JSON\".format(name)) return self.load_protein_json(project, name)", "False 
return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else:", "def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name))", "Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir():", "perf_counter() out = func(self, id, filename) end = perf_counter() print(end - start) return", "load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {} as", "load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name, file_path, json_load=True, data_load=True,", "prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\":", "return [f.stem if f not in self.list_ignored() else \"\" for f in self.list_pdb_files()]", "True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for pdb, file_path", "yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name) elif priority", "Path(pdb_file)) if isinstance(val, Exception): print(val) elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{}", "self.proteins = {} def get_protein(self, name): try: if self.ignore_links.get(name) is None: return self.proteins[name]", "P test_code = 
'''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins", "try: for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons", "project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\")", "proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {} already exists, cancelling operation\".format(name)) def load_protein_json(self,", "file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path", "loaded or handled\") def load_all_json(self, ids): try: for json_file, id in zip(self.json_path.iterdir(), ids):", "config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [", "\"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else: raise Exception(\"Not a valid {} file\".format(file_path.suffix))", "= config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path =", "self._update_links() def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e", "[ Path(file).__str__() for file in ignored_paths ] } return config def create_project(config_path, name,", "self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return", "def 
__init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def get_protein(self, name):", "Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home()", "from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except:", "raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name ==", "\"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for pdb, file_path in", "create new project: {}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] =", "ignored\".format(pdb_file)) else: print(\"{} loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could not", "if proteins is not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {}", "valid identifier providided\") def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path:", "\"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths }", "to process {} from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load,", "= {} def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old =", "self.ignore_links } return config def add_protein(self, name, 
file_path): if Path(file_path).is_file(): if Path(file_path).suffix ==", "name): def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\":", "{} def generate_default_ids(self): return [f.stem if f not in self.list_ignored() else \"\" for", "\".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone,", "return Exception(\"{} is empty\".format(P.name)) return P def get_config(self): config = { \"name\": self.name,", "TPP.API.centroid_protein import CentroidProtein import json from shutil import copyfile from time import perf_counter", "return Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine: def __init__(self): self.projects = {}", "= False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None", "not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config", "= {} self.exclude_backbone = False self.distance_cutoff = 6 def add_protein(self, project_name, name, file_path,", "<reponame>Buddyboy201/top_pro_pack-v3 import os import sys import sqlalchemy from pathlib import Path from TPP.API.centroid_protein", "\"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\")", "def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def", 
"None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name): file_path =", "= { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\":", "name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and", "\"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in RCSB", "P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P def get_config(self): config = {", "def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone,", "= None if Path(file_name).suffix == \".json\": file_path = self.json_path / Path(file_name) else: file_path", "import CentroidProtein import json from shutil import copyfile from time import perf_counter def", "P else: return Exception(\"{} is empty\".format(P.name)) elif raw_data != None: print(\"Atempting to process", "a valid {} file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config = { \"name\":", "] self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def", "= Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file in config[\"ignored_paths\"]", "\".json\": file_path = self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name) if", "= self._init_protein(id, file_path) if 
isinstance(val, Exception): return val self.proteins[id] = val self.ignore_links[id] =", "a valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix ==", "def add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path,", "> 0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return", "name): try: if self.ignore_links.get(name) is None: return self.proteins[name] else: return None except: return", "pdbs could not be loaded or handled\") def load_all_json(self, ids): try: for json_file,", "= centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e)", "config path: {}\".format(Path(config_path))) with open(config_path, \"rt\") as config_file: config = json.load(config_file) self.distance_cutoff =", "isinstance(val, Exception): return val self.proteins[id] = val self.ignore_links[id] = False return val else:", "in ignored_paths ] } return config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6,", "centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name, file_path,", "self.distance_cutoff = 6 def add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out", "{ \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, 
\"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links,", "self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path)", "/ Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {}", "{} file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config = { \"name\": self.name, \"pdb_path\":", "/ Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) except: print(\"top_pro_pack data", "import sqlalchemy from pathlib import Path from TPP.API.centroid_protein import CentroidProtein import json from", "with open(config_path, \"rt\") as config_file: config = json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone =", "and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P class", "== \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name)) try: project_path", "raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file(): if", "Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False self.distance_cutoff", "return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else:", "self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def 
get_protein(self,", "def load_protein(self, id, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path =", "id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All jsons could not be", "6 def add_protein(self, project_name, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name,", "None: return self.proteins[name] else: return None except: return Exception(\"{} not loaded yet\".format(name)) def", "P else: return Exception(\"{} is empty\".format(P.name)) elif data_load and data_url is not None:", "name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name),", "in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for file in self.ignore_links if file", "pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in", "data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load", "from time import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config =", "file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\": if self.pdb_links.get(name) is not", "not self.json_path.is_dir(): self.json_path.mkdir(parents=True) def get_protein(self, id): try: if not self.ignore_links.get(id): return self.proteins[id] else:", "copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def ignore_protein(self, name): def", "self._init_project(config_path) 
self.config_path = Path(config_path) self.loaded_proteins = {} def _update_links(self): for pdb_name in self.pdb_links:", "process {} from directly from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone)", "Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = [] if proteins is not None: self.add_dataset(name, proteins) print(\"Project", "self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid", "name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return", "already exists, cancelling operation\".format(name)) def load_protein_json(self, project, name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project,", "self.ignore_links = [Path(file) for file in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file)", "#self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts failed", "pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False,", "empty\".format(P.name)) return P class TPP_Engine: def __init__(self): self.projects = {} def load_project(self, config_path):", "= self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): if", "centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0:", "= '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / 
Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder", "return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self): return", "sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P", "_init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config path: {}\".format(Path(config_path))) with open(config_path, \"rt\")", "except: return Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority ==", "out = func(self, id, filename) end = perf_counter() print(end - start) return out", "else: print(\"{} loaded as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could not be", "from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return", "self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir()", "text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e =", "raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name): file_path = None if", "distance_cutoff, ignored_paths): config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone,", "= { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": 
Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\":", "def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins = {} def _update_links(self): for", "create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\": name = name.format(len(self.projects)+1) print(\"Attempting", "/ Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False", "proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine: def __init__(self): try:", "self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not", "name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self,", "def load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {}", "Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path) not in", "__init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() / Path(\"top_pro_pack/bin\")))", "config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff,", "self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project:", "= 
name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name)) try: project_path = self.base_lib_path /", "def __init__(self): self.projects = {} def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] =", "add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path /", "def get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name) elif priority ==", "valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does", "def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack data folder at {}\".format(Path.home() /", "is invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name): file_path = None if Path(file_name).suffix ==", "ids, pdb_filter=None): try: for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {} as {}", "self.json_path.mkdir(parents=True) def get_protein(self, id): try: if not self.ignore_links.get(id): return self.proteins[id] else: return None", "get_name(self): return self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self):", "valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path", "raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path", "\"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues)", "start = perf_counter() 
out = func(self, id, filename) end = perf_counter() print(end -", "in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting to load {} from JSON\".format(name)) return", "#self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P def init_protein(self, project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None):", "= config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True)", "else: raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else:", "self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception: print(out) else: self.projects[project_name].append(out)", "is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True,", "data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects", "file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load:", "config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"])", "CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0", "in 
list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files have valid identifier providided\") def", "Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\":", "/ Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not a valid", "else: return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path = None if Path(file_name).suffix ==", "{} as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val)", "None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name)) try:", "if type(out) is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True,", "is not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {} already exists,", "proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb", "file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path = self.json_path / Path(file_path).name if", "json_name in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in", "load_all_json(self, ids): try: for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise", "= sys.exc_info()[0] return Exception(e) if len(P.residues) > 0 and Path(file_path).suffix != \".json\": 
P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff)", "download/process {} from RCSB\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url) except", "Path(config_path) self.loaded_proteins = {} def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not", "database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return", "get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config = { \"name\": name, \"pdb_path\": Path(pdb_path).__str__(),", "priority == \"pdb\": if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not", "directly from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e =", "0: #raise Exception(\"Not all files have valid identifier providided\") def _init_project(self, config_path): if", "def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def", "name, \"pdb_path\": Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for", "as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif", "json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd() + \"\\\\top_pro_pack_logs\") and json_load: print(\"Attempting", "\"pdb_path\": Path(pdb_path).__str__(), \"json_path\": 
Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file", "return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return", "else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config", "else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\":", "\"ignored_paths\": [ Path(file).__str__() for file in ignored_paths ] } return config def create_project(config_path,", "= '''class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins = {}", "= Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not", "not None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name))", "self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links", "as {}\".format(pdb_file, id)) except: raise Exception(\"All pdbs could not be loaded or handled\")", "@_get_function_perf_decorator def load_protein(self, id, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path", "Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True)", "Exception): print(val) elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} 
loaded as {}\".format(pdb_file,", "process {} from raw text\".format(name)) try: P = centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url,", "be loaded\") def get_config(self): config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(),", "self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def __init__(self,", "self.pdb_links.get(pdb_name) not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not", "if self.pdb_links.get(name) is not None and self.pdb_links.get(name).is_file() and self.pdb_links.get(name) not in self.ignore_links: self._init_protein(name,", "operation\".format(name)) def load_protein_json(self, project, name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P =", "\"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"ignore_path\": self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\":", "Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try: for pdb_file, id in", "\"ignore_links\": self.ignore_links } return config def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name)", "list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name,", "in self.ignore_links: self._init_protein(name, self.pdb_path / Path(self.pdb_links.get(name))) else: class Project: def __init__(self, config_path): self._init_project(config_path)", "Path(file_name) else: file_path = self.pdb_path / 
Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path)", "self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) else: print(\"All processing attempts", "return self.name def get_pdb_path(self): return self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self): return", "None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {} already exists, cancelling operation\".format(name))", "loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name) elif", "config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True)", "raise Exception(\"{} does not exist\".format(Path(file_path))) def remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise", "return Exception(e) if len(P.residues) > 0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return", "isinstance(val, Exception): print(val) elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded as", "Path(pdb_path).__str__(), \"json_path\": Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in", "list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for file in self.ignore_links if file in", "self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): self.proteins[name] =", 
"P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is empty\".format(P.name)) elif data_load", "# self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return Exception(\"{} is empty\".format(P.name)) elif data_load and", "not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all files have valid identifier providided\")", "Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links = {}", "elif raw_data != None: print(\"Atempting to process {} from raw text\".format(name)) try: P", "if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not", "if priority == \"json\": return self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name) else:", "Path(config_path) self.proteins = {} def get_protein(self, name): try: if self.ignore_links.get(name) is None: return", "pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self,", "create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() / Path(\"{}_config.json\".format(name)) config", "id in zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id,", "new project: {}\".format(name)) try: project_path = self.base_lib_path / Path(\"bin/{}\".format(name)) os.makedirs(project_path) self.projects[name] = []", "Path(file_path).name 
self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix))", "return Exception(\"{} does not exist in RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff)", "data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is", "= self._init_protein(name, file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, name,", "= json.load(config_file) self.distance_cutoff = config[\"distance_cutoff\"] self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path =", "elif data_load and data_url is not None: data_url = data_url.format(name[:4] + \".pdb\") print(\"Attempting", "\"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return config def _update_links(self): for pdb_name in self.pdb_links:", "centroid_protein.CentroidProtein(name, \"\", exclude_backbone=self.exclude_backbone, download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e) if", "for json_name in self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name", "not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {} already exists, cancelling", "self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self): return not self.exclude_backbone def get_cutoff(self): return", "= Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignore_path = Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links =", "raise Exception(\"Not a valid {} 
file\".format(file_path.suffix)) def ignore_protein(self, name): def get_config(self): config =", "self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path): try: P = CentroidProtein(name,", "> 0: print(\"Atempting to process {} from directly from pdb file\".format(name)) try: P", "import Path from TPP.API.centroid_protein import CentroidProtein import json from shutil import copyfile from", "not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid", "def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name, None)", "else: return None except: return Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"):", "Path(json_file)) except: raise Exception(\"All jsons could not be loaded\") def get_config(self): config =", "self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self):", "\".json\": new_file_path = self.json_path / Path(file_path).name if self.json_links.get(name) is not None: raise Exception(\"{}", "for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self,", "self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.ignore_path.is_dir():", "return Exception(\"{} is empty\".format(P.name)) elif data_load and data_url is not None: data_url =", "self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) 
def get_protein(self, id): try: if not self.ignore_links.get(id): return", "self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths", "= CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) >", "self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links", "is None: return self.proteins[name] else: return None except: return Exception(\"{} not loaded yet\".format(name))", "= self.pdb_path / Path(file_name) if file_path.is_file(): self.proteins[name] = self._init_protein(name, file_path) else: raise Exception(\"Not", "\"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self): return self.pdb_path.glob(\"*.pdb\") def list_json_files(self): return self.json_path.glob(\"*.json\")", "= self.json_path / Path(file_path).name if self.json_links.get(name) is not None: raise Exception(\"{} already taken", "copyfile from time import perf_counter def get_config(name, pdb_path, json_path, exclude_backbone, distance_cutoff, ignored_paths): config", "new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self, name,", "self.json_links.get(json_name) not in list(self.json_path.iterdir()): self.json_links.pop(json_name, None) self.ignore_links = [Path(file) for file in self.ignore_links", "= config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self,", "self.json_links: if self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in 
self.ignore_links: if", "invalid/ignored\".format(id)) @_get_function_perf_decorator def load_protein(self, id, file_name): file_path = None if Path(file_name).suffix == \".json\":", "\"ignore_links\": self.ignore_links } return config def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix", "= Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not", "and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P def", "self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self): return self.name def get_pdb_path(self): return self.pdb_path", "= Path(config_path) self.proteins = {} def get_protein(self, name): try: if self.ignore_links.get(name) is None:", "self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try: for", "{} def _update_links(self): for pdb_name in self.pdb_links: if self.pdb_links.get(pdb_name) not in list(self.pdb_path.iterdir()): self.pdb_links.pop(pdb_name,", "Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine: def __init__(self): self.projects = {} def", "Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False self.distance_cutoff = 6 def", "else: return Exception(\"{} is empty\".format(P.name)) elif raw_data != None: print(\"Atempting to process {}", "Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def", "in proteins: if prev_pdb != pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], 
data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"])", "not None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix", "val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\") self.ignore_links[id] = True return None else: raise Exception(\"Not", "a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\":", "] } return config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path", "config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False, distance_cutoff=6, ignored_paths=[]): #config_path = Path.cwd() /", "pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\": name =", "in zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file))", "self.ignored_paths def _init_protein(self, id, file_path): try: P = CentroidProtein(id, file_path, exclude_backbone=self.exclude_backbone) except: e", "if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path):", "proj''' old = '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\")) print(\"Initializing top_pro_pack", "Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): 
self.ignored_paths.append(Path(file_path)) else: raise", "proteins is not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project {} already", "else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path) not in self.ignored_paths:", "list_json_files(self): return self.json_path.glob(\"*.json\") def list_ignored(self): return self.ignored_paths def _init_protein(self, id, file_path): try: P", "if isinstance(val, Exception): print(val) elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else: print(\"{} loaded", "Path.cwd() / Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path,", "in self.ignore_links if file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir())] #if len([Path(file) for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if", "data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def create_new_project(self, name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if", "def init_protein(self, project, name, file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): if name in os.listdir(os.getcwd()", "by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name", "id, filename) end = perf_counter() print(end - start) return out return inner def", "in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if not Path(config_path).is_file(): raise Exception(\"invalid config", "= sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: 
P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return", "remove_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self,", "handled\") def load_all_json(self, ids): try: for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file))", "!= pdb: self.add_protein(project_name, pdb, file_path, json_load=modifers[\"json_load\"], data_load=modifers[\"data_load\"], data_url=modifers[\"data_url\"], raw_data=modifers[\"raw_data\"]) prev_pdb = pdb def", "self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() / Path(\"top_pro_pack/bin\"))", "ignore_protein(self, name): def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(),", "\"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"ignored_paths\": self.ignored_paths } return config def list_pdb_files(self):", "self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir():", "= new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def load_protein(self,", "return Exception(\"{} is empty\".format(P.name)) return P test_code = '''class Project: def __init__(self, config_path):", "__init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.loaded_proteins = {} def _update_links(self): for pdb_name", "file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = 
centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P)", "open(config_path, \"wt\") as file: json.dump(config, file) class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path", "\"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in ignored_paths ] }", "json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self,", "val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif isinstance(val, type(None)): print(\"{} is", "for file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not", "class Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def", "Path(file_path).name if self.json_links.get(name) is not None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name)))", "!= \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P test_code = '''class", "exist in RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return", "raw_data=None): out = self.init_protein(name, file_path, json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data) if type(out) is Exception:", "data_url.format(name[:4] + \".pdb\") print(\"Attempting to download/process {} from RCSB\".format(name)) try: P = 
centroid_protein.CentroidProtein(name,", "self.exclude_backbone = config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links", "self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False self.distance_cutoff =", "= perf_counter() out = func(self, id, filename) end = perf_counter() print(end - start)", "load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine: def", "not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path,", "else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if", "e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0 and Path(file_path).suffix != \".json\":", "return Exception(\"{} not loaded yet\".format(name)) def get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\":", "raw_data=raw_data) if type(out) is Exception: print(out) else: self.projects[project_name].append(out) def add_dataset(self, project_name, proteins, modifers={\"json_load\":", "file in list(self.pdb_path.iterdir())+list(self.json_path.iterdir()) if Path(file) not in list(self.pdb_links.values())+list(self.json_links.values())+list(self.ignore_links)]) > 0: #raise Exception(\"Not all", "id, filename): start = perf_counter() out = func(self, id, filename) end = perf_counter()", "self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return", "print(\"Atempting to process {} from directly from pdb file\".format(name)) try: P = centroid_protein.CentroidProtein(name,", 
"get_protein(self, id): try: if not self.ignore_links.get(id): return self.proteins[id] else: return None except: raise", "/ Path(\"{}_config.json\".format(name)) config = get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\")", "= [ Path(file) for file in config[\"ignored_paths\"] ] self.ignore_links = {} if not", "name = name.format(len(self.projects)+1) print(\"Attempting to create new project: {}\".format(name)) try: project_path = self.base_lib_path", "= [] if proteins is not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except:", "f in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename): start = perf_counter() out", "val self.proteins[id] = val self.ignore_links[id] = False return val else: # print(\"\\n#################################################\\n#########################################################\\n\",file_path, \"\\n#################################################\\n#########################################################\\n\\n\")", "Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine: def __init__(self): try: os.makedirs(Path.home() /", "class TPP_Engine: def __init__(self): self.projects = {} def load_project(self, config_path): proj = Project(Path(config_path))", "exclude_backbone=self.exclude_backbone) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) #", "Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix ==", "print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val = self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception):", "self.proteins = {} def generate_default_ids(self): 
return [f.stem if f not in self.list_ignored() else", "= self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file))", "priority == \"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path", "else \"\" for f in self.list_pdb_files()] def _get_function_perf_decorator(func): def inner(self, id, filename): start", "len(file_path) > 0: print(\"Atempting to process {} from directly from pdb file\".format(name)) try:", "except: print(\"top_pro_pack data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() /", "\"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\": self.json_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\":", "taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path /", "== \"json\": return self.json_links.get(name) elif priority == \"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name)", "self.json_links.get(name))) else: self.json_links[name] = Path(new_file_path).name else: new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] =", "get_config(name=name, pdb_path=pdb_path, json_path=json_path, exclude_backbone=exclude_backbone, distance_cutoff=distance_cutoff, ignored_paths=ignored_paths) with open(config_path, \"wt\") as file: json.dump(config, file)", "Path(file_path).is_file(): self.ignored_paths.remove(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path))) def load_all_pdbs(self, ids, pdb_filter=None): try:", "return self.pdb_path def get_json_path(self): return self.json_path def get_ignore_path(self): return self.ignore_path def is_mc(self): 
return", "download_data=data_load, data_url=data_url, raw_data=raw_data) except: e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0:", "raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def add_protein(self, file_path): if Path(file_path).is_file(): if Path(file_path).suffix", "= Path(config[\"ignore_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not", "Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix)) def ignore_protein(self, name):", "name, file_name): file_path = None if Path(file_name).suffix == \".json\": file_path = self.json_path /", "is_mc(self): return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path): try:", "self.projects = {} def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old", "not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path): try: P =", "== \"pdb\": return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path =", "= config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths =", "return self.pdb_links.get(name) else: return self.ignore_links.get(name) def load_protein(self, name, file_name): file_path = None if", "None if Path(file_name).suffix == \".json\": file_path = self.json_path / Path(file_name) else: file_path =", "list_pdbs(self): return self.pdb_path.iterdir() def list_json(self): return self.json_path.iterdir() def list_ignored(self): return self.ignore_path.iterdir() def get_name(self):", "self.load_protein(id, Path(pdb_file)) if isinstance(val, Exception): print(val) elif 
isinstance(val, type(None)): print(\"{} is ignored\".format(pdb_file)) else:", "in self.ignore_links: if self.ignore_links.get(ignore_name) not in list(self.list_ignored()): self.ignore_links.pop(ignore_name, None) def _init_project(self, config_path): if", "def is_mc(self): return not self.exclude_backbone def get_cutoff(self): return self.distance_cutoff def _init_protein(self, name, file_path):", "Project: def __init__(self, config_path): self._init_project(config_path) self.config_path = Path(config_path) self.proteins = {} def generate_default_ids(self):", "self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path)", "in self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val, Exception): return val self.proteins[id] =", "= Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False self.distance_cutoff = 6", "new_file_path = self.pdb_path / Path(file_path).name self.pdb_links[name] = new_file_path copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not", "file\".format(file_path.suffix)) def add_ignored_path(self, file_path): if Path(file_path).is_file(): self.ignored_paths.append(Path(file_path)) else: raise Exception(\"{} does not exist\".format(Path(file_path)))", "Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path =", "not exist in RCSB database\".format(name)) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid()))", "- start) return out return inner def _init_project(self, config_path): if not Path(config_path).is_file(): raise", "{}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone =", "0: 
print(\"Atempting to process {} from directly from pdb file\".format(name)) try: P =", "is not None: raise Exception(\"{} already taken by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] =", "file_path = self.pdb_path / Path(file_name) if file_path.is_file(): if Path(file_path) not in self.ignored_paths: val", "self.ignored_paths: val = self._init_protein(id, file_path) if isinstance(val, Exception): return val self.proteins[id] = val", "self.ignore_links.get(id): return self.proteins[id] else: return None except: raise Exception(\"{} is invalid/ignored\".format(id)) @_get_function_perf_decorator def", "self.distance_cutoff def _init_protein(self, name, file_path): try: P = CentroidProtein(name, file_path, exclude_backbone=self.exclude_backbone) except: e", "CentroidProtein import json from shutil import copyfile from time import perf_counter def get_config(name,", "file\".format(file_path.suffix)) def add_protein(self, name, file_path): if Path(file_path).is_file(): if Path(file_path).suffix == \".json\": new_file_path =", "ids): try: for json_file, id in zip(self.json_path.iterdir(), ids): self.load_protein(id, Path(json_file)) except: raise Exception(\"All", "Path(config[\"json_path\"]) self.pdb_links = config[\"pdb_links\"] self.json_links = config[\"json_links\"] self.ignore_links = config[\"ignore_links\"] if not self.pdb_path.is_dir():", "\".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P class TPP_Engine: def __init__(self):", "def ignore_protein(self, name): def get_config(self): config = { \"name\": self.name, \"pdb_path\": self.pdb_path.__str__(), \"json_path\":", "for pdb_file, id in zip(self.list_pdb_files(), ids): print(\"loading {} as {} ...\".format(Path(pdb_file), id)) val", "file_path, json_load=True, data_load=True, data_url=\"https://files.rcsb.org/download/{}\", raw_data=None): out = self.init_protein(name, file_path, 
json_load=json_load, data_load=data_load, data_url=data_url.format(name), raw_data=raw_data)", "get_filename_from_name(self, name, priority=\"pdb\"): if priority == \"json\": return self.json_links.get(name) elif priority == \"pdb\":", "by {}\".format(name, self.json_links.get(name))) else: self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name", "modifers={\"json_load\": True, \"data_load\": True, \"data_url\": \"https://files.rcsb.org/download/{}\", \"raw_data\": None}): prev_pdb = \"\" for pdb,", "self.ignore_path.is_dir(): self.ignore_path.mkdir(parents=True) if not self.json_path.is_dir(): self.json_path.mkdir(parents=True) self._update_links() def list_loaded_proteins(self): return self.proteins.keys() def list_pdbs(self):", "= self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise Exception(\"Not a valid {} file\".format(file_path.suffix))", "raise Exception(\"All pdbs could not be loaded or handled\") def load_all_json(self, ids): try:", "e = sys.exc_info()[0] return Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) #self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid())", "== \".json\": file_path = self.json_path / Path(file_name) else: file_path = self.pdb_path / Path(file_name)", "name): file_path = self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path)", "self.json_links.get(json_name) not in list(self.list_json()): self.json_links.pop(json_name, None) for ignore_name in self.ignore_links: if self.ignore_links.get(ignore_name) not", "[] if proteins is not None: self.add_dataset(name, proteins) print(\"Project {} created!\".format(name)) except: print(\"Project", "in self.list_ignored() else \"\" for f in self.list_pdb_files()] def 
_get_function_perf_decorator(func): def inner(self, id,", "end = perf_counter() print(end - start) return out return inner def _init_project(self, config_path):", "self.json_links[name] = Path(file_path).suffix else: new_file_path = self.pdb_path / Path(file_path).name copyfile(Path(file_path), new_file_path) else: raise", "file in ignored_paths ] } return config def create_project(config_path, name, pdb_path, json_path, exclude_backbone=False,", "in config[\"ignored_paths\"] ] self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not self.json_path.is_dir():", "config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.ignored_paths = [ Path(file) for file", "self.base_lib_path / Path(\"bin/{}/{}/data.json\".format(project, name)) P = centroid_protein.CentroidProtein(\"\", \"\", load_json=True, json_data_file_path=file_path) #self.proteins.append(P) self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return", "data_url=data_url) except sqlalchemy.orm.exc.NoResultFound: return Exception(\"{} does not exist in RCSB database\".format(name)) if len(P.residues)", "def load_project(self, config_path): proj = Project(Path(config_path)) self.projects[proj.get_name()] = proj''' old = '''class TPP_Engine:", "\".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P def get_config(self): config =", "# self.proteins.append(P) #self.E.update_static_total_pairs_table((P.get_heatmap_data_centroid())) return P else: return Exception(\"{} is empty\".format(P.name)) elif raw_data !=", "/ Path(\"top_pro_pack\") self.projects = {} self.exclude_backbone = False self.distance_cutoff = 6 def add_protein(self,", "Exception(e) if len(P.residues) > 0: P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) # self.proteins.append(P) 
#self.E.update_static_total_pairs_table(P.get_heatmap_data_centroid()) return P else: return", "def get_config(self): config = { \"name\": self.name, \"pdb_path\": Path(self.pdb_path).__str__(), \"json_path\": Path(self.json_path).__str__(), \"exclude_backbone\": self.exclude_backbone,", "0 and Path(file_path).suffix != \".json\": P.generate_centroid_cliques(distance_cutoff=self.distance_cutoff) else: return Exception(\"{} is empty\".format(P.name)) return P", "self.ignore_path.__str__(), \"exclude_backbone\": self.exclude_backbone, \"distance_cutoff\": self.distance_cutoff, \"pdb_links\": self.pdb_links, \"json_links\": self.json_links, \"ignore_links\": self.ignore_links } return", "= config[\"exclude_backbone\"] self.name = config[\"name\"] self.pdb_path = Path(config[\"pdb_path\"]) self.json_path = Path(config[\"json_path\"]) self.pdb_links =", "file in config[\"ignored_paths\"] ] self.ignore_links = {} if not self.pdb_path.is_dir(): self.pdb_path.mkdir(parents=True) if not", "a valid {} file\".format(file_path.suffix)) def load_protein(self, name, priority=\"pdb\"): if priority == \"pdb\": if", "else: print(\"All processing attempts failed for {}, check provided info and try again\".format(name))'''", "os.makedirs(project_path) self.projects[name] = [] if proteins is not None: self.add_dataset(name, proteins) print(\"Project {}", "Path(json_path).__str__(), \"exclude_backbone\": exclude_backbone, \"distance_cutoff\": distance_cutoff, \"ignored_paths\": [ Path(file).__str__() for file in ignored_paths ]", "self.config_path = Path(config_path) self.proteins = {} def generate_default_ids(self): return [f.stem if f not", "print(\"top_pro_pack data files located at {}\".format(Path.home() / Path(\"top_pro_pack/bin\"))) self.base_lib_path = Path.home() / Path(\"top_pro_pack\")", "not in list(self.list_pdbs()): self.pdb_links.pop(pdb_name, None) for json_name in self.json_links: if self.json_links.get(json_name) not in", "def create_new_project(self, 
name=\"project_{}\", exclude_backbone=False, distance_cutoff=6, proteins=None): if name == \"project_{}\": name = name.format(len(self.projects)+1)" ]
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "client): \"\"\"Echo machine can be staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created", "400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a task that", "be staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status =", "task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks, stop one", "this file except in compliance with the License. # You may obtain a", "'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [ {'taskId': task_2[\"taskId\"], 'state':", "client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state':", "create_app # # CONSTANTS AND DEFINITIONS # # # CODE # @pytest.fixture def", "= client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop", "# @pytest.fixture def client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as client: yield client", "stop one and check reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 =", "ANY KIND, either express or implied. 
# See the License for the specific", "def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp = client.post( '/v1/tasks',", "task that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 #", "\"\"\"API returns 404 for a task that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id')", "task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [ {'taskId': task_2[\"taskId\"],", "assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert", "== [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining ==", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "= client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code", "resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId'] assert", "client): \"\"\"Start two tasks and expect them to be running\"\"\" resp_create = client.post('/v1/tasks',", "import create_app # # CONSTANTS AND DEFINITIONS # # # CODE # @pytest.fixture", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "expect them to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create", "assert task_status['state'] # just anything assert resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId']", "'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [ {'taskId': 
task_2[\"taskId\"], 'state': 'running'}]", "sleep 5 echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests", "# CONSTANTS AND DEFINITIONS # # # CODE # @pytest.fixture def client(): \"\"\"Create", "OF ANY KIND, either express or implied. # See the License for the", "assert resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state'] # just anything", "Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp", "api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\"", "\"\"\"Query API version\"\"\" resp = client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info", "API tests \"\"\" # # IMPORTS # import pytest from starlette.testclient import TestClient", "Corp. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "test client\"\"\" with TestClient(create_app()) as client: yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query", "client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'})", "{ 'machine': 'echo', 'parameters': \"\"\" echo Machine starting sleep 5 echo Machine stopping", "api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid() class TestApiV1:", "= client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining", "'echo', 'parameters': \"\"\" echo Machine starting sleep 5 echo Machine 
stopping \"\"\"} def", "assert '/' in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE", "yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp = client.get('/') api_info", "= client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId':", "as client: yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp =", "def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect them to be running\"\"\" resp_create", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "@pytest.fixture def client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as client: yield client #", "client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "resp.json() assert api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0]", "stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect", "task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"]", "# client() def test_api_info_responses_are_valid(client): 
\"\"\"Query API version\"\"\" resp = client.get('/') api_info = resp.json()", "schema) requests are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code ==", "# # # CODE # @pytest.fixture def client(): \"\"\"Create test client\"\"\" with TestClient(create_app())", "client): \"\"\"Start two tasks, stop one and check reported task lists\"\"\" task_1 =", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "a task that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404", "task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status", "'/' in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "= resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code == 201 assert", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp = client.get('/') api_info =", "# limitations under the License. 
# pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\"", "for a task that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code ==", "required by applicable law or agreed to in writing, software # distributed under", "resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 =", "!= task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks, stop one and", "applicable law or agreed to in writing, software # distributed under the License", "== task_created['taskId'] assert task_status['state'] # just anything assert resp_stop.status_code == 200 assert stop_status['taskId']", "== [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] !=", "them to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create =", "resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId':", "or agreed to in writing, software # distributed under the License is distributed", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "[ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"]", "task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start", "<gh_stars>0 # Copyright 2021 IBM Corp. 
# # Licensed under the Apache License,", "5 echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are", "assert resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId'] ==", "resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected() def", "task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}')", "requests are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400", "specific language governing permissions and # limitations under the License. # pylint:disable=redefined-outer-name,no-self-use \"\"\"", "one and check reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks',", "404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be staeted and stopped\"\"\"", "governing permissions and # limitations under the License. 
# pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "CODE # @pytest.fixture def client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as client: yield", "writing, software # distributed under the License is distributed on an \"AS IS\"", "class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\"", "client: yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp = client.get('/')", "assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "License. # You may obtain a copy of the License at # #", "test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect them to be running\"\"\" resp_create =", "IBM Corp. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "assert resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client):", "json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list =", "task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running", "compliance with the License. 
# You may obtain a copy of the License", "assert resp.status_code == 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be", "to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks',", "Machine starting sleep 5 echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong", "starlette.testclient import TestClient from task_runner.api.star_app import create_app # # CONSTANTS AND DEFINITIONS #", "'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/' in schema_info", "test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a task that is not", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\" echo Machine starting sleep", "anything assert resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self,", "= client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self,", "json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop')", "api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/'", "'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two", 
"TestClient from task_runner.api.star_app import create_app # # CONSTANTS AND DEFINITIONS # # #", "== 200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state'] # just anything assert resp_stop.status_code", "client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state':", "not use this file except in compliance with the License. # You may", "test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp = client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\")", "task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json()", "stop_status = resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code == 200", "task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/')", "'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks,", "License, Version 2.0 (the \"License\"); # you may not use this file except", "# test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo',", "and check reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json()", "task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = 
client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop')", "api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] == 'task_runner'", "\"\"\"Tests for API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\" echo Machine", "pytest from starlette.testclient import TestClient from task_runner.api.star_app import create_app # # CONSTANTS AND", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "machine can be staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json()", "200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks", "json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running ==", "client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0] assert", "IMPORTS # import pytest from starlette.testclient import TestClient from task_runner.api.star_app import create_app #", "\"\"\"Start two tasks and expect them to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE)", "client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'},", "# you may not use this file except in compliance with the License.", "def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be 
staeted and stopped\"\"\" resp_create = client.post('/v1/tasks',", "client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client):", "agreed to in writing, software # distributed under the License is distributed on", "import TestClient from task_runner.api.star_app import create_app # # CONSTANTS AND DEFINITIONS # #", "= resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status =", "test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters':", "test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a task that is not existing\"\"\" resp", "# just anything assert resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped()", "task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state'] # just", "(the \"License\"); # you may not use this file except in compliance with", "just anything assert resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def", "{'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [ {'taskId': task_2[\"taskId\"], 'state': 'running'}] #", "def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a task that is not existing\"\"\"", "test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect them to be running\"\"\"", "the specific language governing permissions and # limitations under the License. 
# pylint:disable=redefined-outer-name,no-self-use", "resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a", "# Unless required by applicable law or agreed to in writing, software #", "= client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine", "resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running =", "by applicable law or agreed to in writing, software # distributed under the", "2021 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}]", "client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code ==", "from starlette.testclient import TestClient from task_runner.api.star_app import create_app # # CONSTANTS AND DEFINITIONS", "# test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect them to be", "client): \"\"\"API returns 404 for a task that is not existing\"\"\" resp =", "= resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId':", "client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404() def 
test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can", "== 201 assert task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId'] assert", "== task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect them", "file except in compliance with the License. # You may obtain a copy", "'min_version' in api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for", "tests \"\"\" # # IMPORTS # import pytest from starlette.testclient import TestClient from", "task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks, stop one and check", "tasks, stop one and check reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2", "and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status", "= { 'machine': 'echo', 'parameters': \"\"\" echo Machine starting sleep 5 echo Machine", "resp.status_code == 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be staeted", "= client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code", "License for the specific language governing permissions and # limitations under the License.", "# # CODE # @pytest.fixture def client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as", "two tasks, stop one and check reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json()", "to in writing, software # distributed under the License is distributed on 
an", "the License. # pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\" # # IMPORTS", "test_remove_running_task(self, client): \"\"\"Start two tasks, stop one and check reported task lists\"\"\" task_1", "schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE = { 'machine':", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "two tasks and expect them to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\" echo Machine starting sleep 5", "task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [", "and expect them to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json()", "can be staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status", "or implied. # See the License for the specific language governing permissions and", "Copyright 2021 IBM Corp. # # Licensed under the Apache License, Version 2.0", "task_created['taskId'] assert task_status['state'] # just anything assert resp_stop.status_code == 200 assert stop_status['taskId'] ==", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId']", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "schema_info = resp.json() assert api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version'", "def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp = client.get('/') api_info = resp.json() resp =", "\"\"\" Task Runner API tests \"\"\" # # IMPORTS # import pytest from", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "language governing permissions and # limitations under the License. # pylint:disable=redefined-outer-name,no-self-use \"\"\" Task", "= resp.json() assert api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version' in", "'machine': 'echo', 'parameters': \"\"\" echo Machine starting sleep 5 echo Machine stopping \"\"\"}", "is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404() def", "task_status['taskId'] == task_created['taskId'] assert task_status['state'] # just anything assert resp_stop.status_code == 200 assert", "'/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API", "assert resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for", "tasks and expect them to be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 =", "\"\"\"Echo machine can be staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created =", "'state': 'running'}] assert task_list_remaining == [ {'taskId': task_2[\"taskId\"], 'state': 'running'}] # 
test_remove_running_task() #", "= resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] == 'task_runner' assert", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "API version\"\"\" resp = client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info =", "in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE = {", "task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running()", "task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code == 201", "# test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a task that is", "task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two tasks and expect them to", "\"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp = client.post(", "client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list", "client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state':", "resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') 
stop_status = resp_stop.json() assert", "task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId':", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "def test_remove_running_task(self, client): \"\"\"Start two tasks, stop one and check reported task lists\"\"\"", "you may not use this file except in compliance with the License. #", "returns 404 for a task that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert", "test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be staeted and stopped\"\"\" resp_create =", "echo Machine starting sleep 5 echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid", "that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404()", "task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining", "resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] == 'task_runner' assert 'version' in", "'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404", "= resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running", "use this file except in compliance with the License. 
# You may obtain", "not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self,", "TestClient(create_app()) as client: yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp", "client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code ==", "under the License. # pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\" # #", "rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected()", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json()", "client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as client: yield client # client() def", "version\"\"\" resp = client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json()", "pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\" # # IMPORTS # import pytest", "in api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests for API", "= client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') 
assert", "2.0 (the \"License\"); # you may not use this file except in compliance", "resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId']", "= client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name']", "assert task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state'] #", "for the specific language governing permissions and # limitations under the License. #", "{'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] #", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo", "test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks, stop one and check reported task", "'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self,", "assert api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert", "# # Unless required by applicable law or agreed to in writing, software", "express or implied. # See the License for the specific language governing permissions", "TestApiV1: \"\"\"Tests for API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\" echo", "either express or implied. 
# See the License for the specific language governing", "ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\" echo Machine starting sleep 5 echo", "== 200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start two", "CONSTANTS AND DEFINITIONS # # # CODE # @pytest.fixture def client(): \"\"\"Create test", "check reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "\"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert", "starting sleep 5 echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema)", "'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def", "# # CONSTANTS AND DEFINITIONS # # # CODE # @pytest.fixture def client():", "# CODE # @pytest.fixture def client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as client:", "client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop')", "test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE)", "DEFINITIONS # # # CODE # @pytest.fixture def client(): \"\"\"Create test client\"\"\" with", "the License. 
# You may obtain a copy of the License at #", "resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json()", "{'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client):", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "== 'task_runner' assert 'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/' in", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "= client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"],", "200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state'] # just anything assert resp_stop.status_code ==", "= client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json()", "resp = client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert", "resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state'] # just anything assert", "import pytest from starlette.testclient import TestClient from task_runner.api.star_app import create_app # # CONSTANTS", "404 for a task that is not existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code", "[ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [", "# pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\" # # IMPORTS # import", "assert task_status['taskId'] == 
task_created['taskId'] assert task_status['state'] # just anything assert resp_stop.status_code == 200", "with the License. # You may obtain a copy of the License at", "'parameters': \"\"\" echo Machine starting sleep 5 echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self,", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp =", "assert task_1[\"taskId\"] != task_2[\"taskId\"] # test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks, stop", "Runner API tests \"\"\" # # IMPORTS # import pytest from starlette.testclient import", "\"\"\" # # IMPORTS # import pytest from starlette.testclient import TestClient from task_runner.api.star_app", "law or agreed to in writing, software # distributed under the License is", "# import pytest from starlette.testclient import TestClient from task_runner.api.star_app import create_app # #", "the License for the specific language governing permissions and # limitations under the", "resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [", "resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"],", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] == 'task_runner' assert 'version'", "# IMPORTS # import pytest from starlette.testclient import TestClient from task_runner.api.star_app import create_app", "= resp_create.json() 
resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running ==", "def client(): \"\"\"Create test client\"\"\" with TestClient(create_app()) as client: yield client # client()", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId'] # test_echo_machine_is_started_and_stopped() def test_parallel_tasks_are_running(self, client): \"\"\"Start", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json() resp_stop =", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters':", "be running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE)", "client.get('/') api_info = resp.json() resp = client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] ==", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "\"\"\" echo Machine starting sleep 5 echo Machine 
stopping \"\"\"} def test_invalid_request_is_rejected(self, client):", "existing\"\"\" resp = client.get('/v1/tasks/some-unspecified-id') assert resp.status_code == 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client):", "resp_status.json() resp_stop = client.post(f'/v1/tasks/{task_created[\"taskId\"]}/stop') stop_status = resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId']", "# Copyright 2021 IBM Corp. # # Licensed under the Apache License, Version", "from task_runner.api.star_app import create_app # # CONSTANTS AND DEFINITIONS # # # CODE", "task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [ {'taskId': task_2[\"taskId\"], 'state': 'running'}] # test_remove_running_task()", "AND DEFINITIONS # # # CODE # @pytest.fixture def client(): \"\"\"Create test client\"\"\"", "# test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be staeted and stopped\"\"\" resp_create", "resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') task_status = resp_status.json()", "== 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns 404 for a task", "json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 # test_invalid_request_is_rejected() def test_not_found_return_code_is_404(self, client): \"\"\"API returns", "with TestClient(create_app()) as client: yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\"", "limitations under the License. 
# pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\" #", "# # IMPORTS # import pytest from starlette.testclient import TestClient from task_runner.api.star_app import", "lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop')", "assert 'min_version' in api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid() class TestApiV1: \"\"\"Tests", "client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state':", "{'taskId': task_1[\"taskId\"], 'state': 'running'}, {'taskId': task_2[\"taskId\"], 'state': 'running'}] assert task_list_remaining == [ {'taskId':", "permissions and # limitations under the License. # pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. # You may obtain a copy of", "# test_parallel_tasks_are_running() def test_remove_running_task(self, client): \"\"\"Start two tasks, stop one and check reported", "License. # pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests \"\"\" # # IMPORTS #", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "(wrong schema) requests are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code", "json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert", "Task Runner API tests \"\"\" # # IMPORTS # import pytest from starlette.testclient", "client\"\"\" with TestClient(create_app()) as client: yield client # client() def test_api_info_responses_are_valid(client): \"\"\"Query API", "task_status['state'] # just anything assert resp_stop.status_code == 200 assert stop_status['taskId'] == task_created['taskId'] #", "client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining =", "are rejected\"\"\" resp = client.post( '/v1/tasks', json={'without-machine-parameters': 'request-is-invalid'}) assert resp.status_code == 400 #", "= resp_stop.json() assert resp_create.status_code == 201 assert task_created['taskId'] assert resp_status.status_code == 200 assert", "'running'}] assert task_list_remaining == [ {'taskId': task_2[\"taskId\"], 'state': 'running'}] # test_remove_running_task() # TestApiV1", "== 404 # test_not_found_return_code_is_404() def test_echo_machine_is_started_and_stopped(self, client): \"\"\"Echo machine can be staeted and", "task_runner.api.star_app import create_app # # CONSTANTS AND DEFINITIONS # # # CODE #", "stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}') 
task_status =", "= client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2 = resp_create.json() resp_list = client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop')", "task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"], 'state': 'running'},", "client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"],", "in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid() class", "= client.get('/v1/tasks/') task_list_running = resp_list.json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.post(f'/v1/tasks/{task_2[\"taskId\"]}/stop') assert task_list_running == [ {'taskId': task_1[\"taskId\"],", "staeted and stopped\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_created = resp_create.json() resp_status = client.get(f'/v1/tasks/{task_created[\"taskId\"]}')", "\"\"\"Create test client\"\"\" with TestClient(create_app()) as client: yield client # client() def test_api_info_responses_are_valid(client):", "\"\"\"Start two tasks, stop one and check reported task lists\"\"\" task_1 = client.post('/v1/tasks',", "and # limitations under the License. 
# pylint:disable=redefined-outer-name,no-self-use \"\"\" Task Runner API tests", "201 assert task_created['taskId'] assert resp_status.status_code == 200 assert task_status['taskId'] == task_created['taskId'] assert task_status['state']", "for API v1\"\"\" ECHO_MACHINE = { 'machine': 'echo', 'parameters': \"\"\" echo Machine starting", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "assert 'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/' in schema_info #", "'version' in api_info['apis'][0] assert 'min_version' in api_info['apis'][0] assert '/' in schema_info # test_api_info_responses_are_valid()", "reported task lists\"\"\" task_1 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running =", "echo Machine stopping \"\"\"} def test_invalid_request_is_rejected(self, client): \"\"\"Invalid (wrong schema) requests are rejected\"\"\"", "client() def test_api_info_responses_are_valid(client): \"\"\"Query API version\"\"\" resp = client.get('/') api_info = resp.json() resp", "json=self.ECHO_MACHINE).json() task_2 = client.post('/v1/tasks', json=self.ECHO_MACHINE).json() task_list_running = client.get('/v1/tasks/').json() client.post(f'/v1/tasks/{task_1[\"taskId\"]}/stop') client.delete(f'/v1/tasks/{task_1[\"taskId\"]}') task_list_remaining = client.get('/v1/tasks/').json()", "running\"\"\" resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_1 = resp_create.json() resp_create = client.post('/v1/tasks', json=self.ECHO_MACHINE) task_2", "= client.get(f\"{api_info['apis'][0]['root']}/schema\") schema_info = resp.json() assert api_info['name'] == 'task_runner' assert 'version' in api_info['apis'][0]" ]
[ "match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION:", "a matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP", "match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise", "match: return self._parse_result(True, match) match = self.not_ok.match(text) if match: return self._parse_result(False, match) if", "for TAP files and lines.\"\"\" # ok and not ok share most of", "return self._parse_plan(match) match = self.bail.match(text) if match: return Bail(match.group('reason')) match = self.version.match(text) if", "Unknown() def _parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive", "Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version')) if version", "# Optional directive text. \"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\", "def parse_line(self, text): \"\"\"Parse a line into whatever TAP category it belongs.\"\"\" match", "#. \\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional", "directives appear with the plan details. \\#? # Optional directive marker. \\s* #", "SKIP directives are allowed in the plan. if directive.text and not directive.skip: return", "= self.plan.match(text) if match: return self._parse_plan(match) match = self.bail.match(text) if match: return Bail(match.group('reason'))", "whitespace. (?P<directive>.*) # Optional directive text. 
\"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE)", "# Match the plan details. [^#]* # Consume any non-hash character to confirm", "confirm only # directives appear with the plan details. \\#? # Optional directive", "if match: return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if", "+ result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\"", "match): \"\"\"Parse a matching result line into a result instance.\"\"\" return Result( ok,", "not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match", "before #. \\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) #", "(?P<description>[^#]*) # Optional description before #. \\#? # Optional directive marker. \\s* #", "re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse a TAP file", "re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace.", "directive text. \"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok'", "plan details. [^#]* # Consume any non-hash character to confirm only # directives", "= self.not_ok.match(text) if match: return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match =", "Optional whitespace. (?P<number>\\d*) # Optional test number. \\s* # Optional whitespace. (?P<description>[^#]*) #", "# Optional description before #. \\#? # Optional directive marker. \\s* # Optional", "Bail, Diagnostic, Plan, Result, Unknown, Version class Parser(object): \"\"\"A parser for TAP files", "+ result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details. 
[^#]*", "a matching result line into a result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(),", "directives are allowed in the plan. if directive.text and not directive.skip: return Unknown()", "lines.\"\"\" # ok and not ok share most of the same characteristics. result_base", "match): \"\"\"Parse a matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) #", "tap.directive import Directive from tap.line import Bail, Diagnostic, Plan, Result, Unknown, Version class", "return self._parse_result(True, match) match = self.not_ok.match(text) if match: return self._parse_result(False, match) if self.diagnostic.match(text):", "tap.line import Bail, Diagnostic, Plan, Result, Unknown, Version class Parser(object): \"\"\"A parser for", "files and lines.\"\"\" # ok and not ok share most of the same", "into a result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match):", "result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+)", "\"\"\"Parse a matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) # Only", "self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\" expected_tests =", "Version class Parser(object): \"\"\"A parser for TAP files and lines.\"\"\" # ok and", "parse_file(self, filename): \"\"\"Parse a TAP file and determine what each line in the", "# Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13", "= Directive(match.group('directive')) # Only SKIP directives are allowed in the plan. if directive.text", "Optional whitespace. (?P<directive>.*) # Optional directive text. 
\"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail", "ok and not ok share most of the same characteristics. result_base = r\"\"\"", "def _parse_result(self, ok, match): \"\"\"Parse a matching result line into a result instance.\"\"\"", "\\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive", "\"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional", "Optional test number. \\s* # Optional whitespace. (?P<description>[^#]*) # Optional description before #.", "a result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version", "an error to explicitly specify ' 'any version lower than 13.') return Version(version)", "_parse_result(self, ok, match): \"\"\"Parse a matching result line into a result instance.\"\"\" return", "directive.text and not directive.skip: return Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok, match):", "diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace. (?P<reason>.*)", "whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION", "# Optional test number. \\s* # Optional whitespace. (?P<description>[^#]*) # Optional description before", "= re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the", "(?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse a TAP file and determine", "file is. This is a generator method that will yield each parsed line.", "expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP directives are allowed in", "a generator method that will yield each parsed line. 
The filename is assumed", "TAP category it belongs.\"\"\" match = self.ok.match(text) if match: return self._parse_result(True, match) match", "# Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\" ok = re.compile(r'^ok' +", "in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line into whatever TAP", "directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\", re.VERBOSE)", "details. \\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional", "Directive from tap.line import Bail, Diagnostic, Plan, Result, Unknown, Version class Parser(object): \"\"\"A", "match = self.not_ok.match(text) if match: return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match", "any non-hash character to confirm only # directives appear with the plan details.", "parser for TAP files and lines.\"\"\" # ok and not ok share most", "the plan. if directive.text and not directive.skip: return Unknown() return Plan(expected_tests, directive) def", "match: return self._parse_plan(match) match = self.bail.match(text) if match: return Bail(match.group('reason')) match = self.version.match(text)", "(?P<number>\\d*) # Optional test number. \\s* # Optional whitespace. (?P<description>[^#]*) # Optional description", "match = self.bail.match(text) if match: return Bail(match.group('reason')) match = self.version.match(text) if match: return", "number. \\s* # Optional whitespace. (?P<description>[^#]*) # Optional description before #. \\#? #", "# Copyright (c) 2015, <NAME> import re from tap.directive import Directive from tap.line", "Result, Unknown, Version class Parser(object): \"\"\"A parser for TAP files and lines.\"\"\" #", "open(filename, 'r') as tap_file: for line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text):", "directive text. 
\"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s*", "to confirm only # directives appear with the plan details. \\#? # Optional", "= 13 def parse_file(self, filename): \"\"\"Parse a TAP file and determine what each", "13 def parse_file(self, filename): \"\"\"Parse a TAP file and determine what each line", "Only SKIP directives are allowed in the plan. if directive.text and not directive.skip:", "import Directive from tap.line import Bail, Diagnostic, Plan, Result, Unknown, Version class Parser(object):", "ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details.", "re from tap.directive import Directive from tap.line import Bail, Diagnostic, Plan, Result, Unknown,", "the same characteristics. result_base = r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) # Optional", "# Optional whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version", "match: return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if match:", "'r') as tap_file: for line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse", "self._parse_plan(match) match = self.bail.match(text) if match: return Bail(match.group('reason')) match = self.version.match(text) if match:", "match: return Bail(match.group('reason')) match = self.version.match(text) if match: return self._parse_version(match) return Unknown() def", "match) if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if match: return self._parse_plan(match) match", "re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan", "tap_file: for line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line", "This is a generator method that will yield each parsed line. 
The filename", "into whatever TAP category it belongs.\"\"\" match = self.ok.match(text) if match: return self._parse_result(True,", "is. This is a generator method that will yield each parsed line. The", "return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version')) if", "version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse a", "return Diagnostic(text) match = self.plan.match(text) if match: return self._parse_plan(match) match = self.bail.match(text) if", "The filename is assumed to exist. \"\"\" with open(filename, 'r') as tap_file: for", "Optional directive text. \"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\", "Optional directive text. \"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out!", "ValueError('It is an error to explicitly specify ' 'any version lower than 13.')", "\\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\" ok = re.compile(r'^ok'", "out! \\s* # Optional whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version =", "Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It", "version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to", "marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\" ok =", "# Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\", re.VERBOSE) diagnostic = re.compile(r'^#')", "exist. \"\"\" with open(filename, 'r') as tap_file: for line in tap_file: yield self.parse_line(line.rstrip())", "whitespace. (?P<description>[^#]*) # Optional description before #. \\#? 
# Optional directive marker. \\s*", "bail = re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace. (?P<reason>.*) # Optional reason.", "Parser(object): \"\"\"A parser for TAP files and lines.\"\"\" # ok and not ok", "the plan details. [^#]* # Consume any non-hash character to confirm only #", "TAP files and lines.\"\"\" # ok and not ok share most of the", "TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse a TAP file and determine what", "to exist. \"\"\" with open(filename, 'r') as tap_file: for line in tap_file: yield", "^1..(?P<expected>\\d+) # Match the plan details. [^#]* # Consume any non-hash character to", "return self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\" expected_tests", "and lines.\"\"\" # ok and not ok share most of the same characteristics.", "self.version.match(text) if match: return self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse a matching", "ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE)", "reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self,", "< self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to explicitly specify ' 'any version", "same characteristics. result_base = r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) # Optional test", "plan. if directive.text and not directive.skip: return Unknown() return Plan(expected_tests, directive) def _parse_result(self,", "\"\"\"Parse a TAP file and determine what each line in the file is.", "generator method that will yield each parsed line. The filename is assumed to", "Diagnostic(text) match = self.plan.match(text) if match: return self._parse_plan(match) match = self.bail.match(text) if match:", "= r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) # Optional test number. 
\\s* #", "Plan, Result, Unknown, Version class Parser(object): \"\"\"A parser for TAP files and lines.\"\"\"", "that will yield each parsed line. The filename is assumed to exist. \"\"\"", "_parse_version(self, match): version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an", "\"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename):", "if match: return self._parse_plan(match) match = self.bail.match(text) if match: return Bail(match.group('reason')) match =", "= int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP directives are allowed in the", "in the file is. This is a generator method that will yield each", "# Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text.", "not directive.skip: return Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse a", "belongs.\"\"\" match = self.ok.match(text) if match: return self._parse_result(True, match) match = self.not_ok.match(text) if", "^Bail\\ out! \\s* # Optional whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version", "Optional whitespace. (?P<description>[^#]*) # Optional description before #. \\#? 
# Optional directive marker.", "text): \"\"\"Parse a line into whatever TAP category it belongs.\"\"\" match = self.ok.match(text)", "line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP directives are allowed", "line into a result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self,", "plan line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP directives are", "if match: return self._parse_result(True, match) match = self.not_ok.match(text) if match: return self._parse_result(False, match)", "2015, <NAME> import re from tap.directive import Directive from tap.line import Bail, Diagnostic,", "tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line into whatever TAP category", "re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) #", "whitespace. (?P<directive>.*) # Optional directive text. \"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail =", "Bail(match.group('reason')) match = self.version.match(text) if match: return self._parse_version(match) return Unknown() def _parse_plan(self, match):", "character to confirm only # directives appear with the plan details. \\#? #", "and determine what each line in the file is. This is a generator", "(?P<directive>.*) # Optional directive text. \"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok", "(?P<directive>.*) # Optional directive text. \"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\"", "Unknown, Version class Parser(object): \"\"\"A parser for TAP files and lines.\"\"\" # ok", "with open(filename, 'r') as tap_file: for line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self,", "Optional whitespace. 
(?P<directive>.*) # Optional directive text. \"\"\" ok = re.compile(r'^ok' + result_base,", "most of the same characteristics. result_base = r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*)", "if directive.text and not directive.skip: return Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok,", "self.not_ok.match(text) if match: return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text)", "if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if match: return self._parse_plan(match) match =", "match = self.plan.match(text) if match: return self._parse_plan(match) match = self.bail.match(text) if match: return", "Match the plan details. [^#]* # Consume any non-hash character to confirm only", "what each line in the file is. This is a generator method that", "matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP directives", "assumed to exist. \"\"\" with open(filename, 'r') as tap_file: for line in tap_file:", "description before #. \\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*)", "and not ok share most of the same characteristics. result_base = r\"\"\" \\s*", "determine what each line in the file is. This is a generator method", "filename is assumed to exist. \"\"\" with open(filename, 'r') as tap_file: for line", "a TAP file and determine what each line in the file is. This", "line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line into whatever", "self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if match: return self._parse_plan(match)", "version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to explicitly specify ' 'any", "text. 
\"\"\", re.VERBOSE) diagnostic = re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s* #", "self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line into whatever TAP category it belongs.\"\"\"", "category it belongs.\"\"\" match = self.ok.match(text) if match: return self._parse_result(True, match) match =", "not ok share most of the same characteristics. result_base = r\"\"\" \\s* #", "the file is. This is a generator method that will yield each parsed", "match = self.version.match(text) if match: return self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse", "directive.skip: return Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse a matching", "= self.bail.match(text) if match: return Bail(match.group('reason')) match = self.version.match(text) if match: return self._parse_version(match)", "each parsed line. The filename is assumed to exist. \"\"\" with open(filename, 'r')", "from tap.line import Bail, Diagnostic, Plan, Result, Unknown, Version class Parser(object): \"\"\"A parser", "yield each parsed line. The filename is assumed to exist. \"\"\" with open(filename,", "plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details. [^#]* # Consume any", "int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to explicitly specify", "details. [^#]* # Consume any non-hash character to confirm only # directives appear", "\\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\", re.VERBOSE) diagnostic =", "Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def", "for line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line into", "with the plan details. \\#? # Optional directive marker. 
\\s* # Optional whitespace.", "non-hash character to confirm only # directives appear with the plan details. \\#?", "Optional whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$')", "match: return self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\"", "it belongs.\"\"\" match = self.ok.match(text) if match: return self._parse_result(True, match) match = self.not_ok.match(text)", "TAP file and determine what each line in the file is. This is", "appear with the plan details. \\#? # Optional directive marker. \\s* # Optional", "self.plan.match(text) if match: return self._parse_plan(match) match = self.bail.match(text) if match: return Bail(match.group('reason')) match", "version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse a TAP file and", "parse_line(self, text): \"\"\"Parse a line into whatever TAP category it belongs.\"\"\" match =", "match): version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error", "from tap.directive import Directive from tap.line import Bail, Diagnostic, Plan, Result, Unknown, Version", "= re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details. [^#]* # Consume any non-hash", "re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE)", "return Unknown() def _parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\" expected_tests = int(match.group('expected'))", "yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a line into whatever TAP category it", "= re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace. (?P<reason>.*) # Optional reason. 
\"\"\",", "ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version')) if version <", "(c) 2015, <NAME> import re from tap.directive import Directive from tap.line import Bail,", "def parse_file(self, filename): \"\"\"Parse a TAP file and determine what each line in", "\"\"\" with open(filename, 'r') as tap_file: for line in tap_file: yield self.parse_line(line.rstrip()) def", "return Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse a matching result line into", "whatever TAP category it belongs.\"\"\" match = self.ok.match(text) if match: return self._parse_result(True, match)", "= self.version.match(text) if match: return self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse a", "<NAME> import re from tap.directive import Directive from tap.line import Bail, Diagnostic, Plan,", "method that will yield each parsed line. The filename is assumed to exist.", "re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse", "= re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace. (?P<reason>.*) #", "Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\",", "line in the file is. This is a generator method that will yield", "text. \"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' +", "directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\" ok", "is a generator method that will yield each parsed line. The filename is", "self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if match: return self._parse_plan(match) match = self.bail.match(text)", "each line in the file is. 
This is a generator method that will", "and not directive.skip: return Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse", "filename): \"\"\"Parse a TAP file and determine what each line in the file", "in the plan. if directive.text and not directive.skip: return Unknown() return Plan(expected_tests, directive)", "re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details. [^#]* # Consume any non-hash character", "import Bail, Diagnostic, Plan, Result, Unknown, Version class Parser(object): \"\"\"A parser for TAP", "\\s* # Optional whitespace. (?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP", "if match: return Bail(match.group('reason')) match = self.version.match(text) if match: return self._parse_version(match) return Unknown()", "test number. \\s* # Optional whitespace. (?P<description>[^#]*) # Optional description before #. \\#?", "Diagnostic, Plan, Result, Unknown, Version class Parser(object): \"\"\"A parser for TAP files and", "the plan details. \\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*)", "are allowed in the plan. if directive.text and not directive.skip: return Unknown() return", "def _parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive =", "r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) # Optional test number. \\s* # Optional", "Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse a matching result line", "allowed in the plan. if directive.text and not directive.skip: return Unknown() return Plan(expected_tests,", "whitespace. (?P<number>\\d*) # Optional test number. \\s* # Optional whitespace. (?P<description>[^#]*) # Optional", "marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. 
\"\"\", re.VERBOSE) diagnostic", "as tap_file: for line in tap_file: yield self.parse_line(line.rstrip()) def parse_line(self, text): \"\"\"Parse a", "Copyright (c) 2015, <NAME> import re from tap.directive import Directive from tap.line import", "Consume any non-hash character to confirm only # directives appear with the plan", "= re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION = 13 def parse_file(self, filename): \"\"\"Parse a TAP", "match = self.ok.match(text) if match: return self._parse_result(True, match) match = self.not_ok.match(text) if match:", "\"\"\"Parse a line into whatever TAP category it belongs.\"\"\" match = self.ok.match(text) if", "matching result line into a result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive')))", "if match: return self._parse_version(match) return Unknown() def _parse_plan(self, match): \"\"\"Parse a matching plan", "<reponame>cans/tappy-pkg # Copyright (c) 2015, <NAME> import re from tap.directive import Directive from", "class Parser(object): \"\"\"A parser for TAP files and lines.\"\"\" # ok and not", "_parse_plan(self, match): \"\"\"Parse a matching plan line.\"\"\" expected_tests = int(match.group('expected')) directive = Directive(match.group('directive'))", "Directive(match.group('directive')) # Only SKIP directives are allowed in the plan. if directive.text and", "# Consume any non-hash character to confirm only # directives appear with the", "import re from tap.directive import Directive from tap.line import Bail, Diagnostic, Plan, Result,", "result line into a result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def", "is assumed to exist. 
\"\"\" with open(filename, 'r') as tap_file: for line in", "= self.ok.match(text) if match: return self._parse_result(True, match) match = self.not_ok.match(text) if match: return", "# ok and not ok share most of the same characteristics. result_base =", "match) match = self.not_ok.match(text) if match: return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text)", "of the same characteristics. result_base = r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) #", "Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) # Optional directive text. \"\"\"", "self.ok.match(text) if match: return self._parse_result(True, match) match = self.not_ok.match(text) if match: return self._parse_result(False,", "# Optional whitespace. (?P<description>[^#]*) # Optional description before #. \\#? # Optional directive", "file and determine what each line in the file is. This is a", "self._parse_result(True, match) match = self.not_ok.match(text) if match: return self._parse_result(False, match) if self.diagnostic.match(text): return", "return self._parse_result(False, match) if self.diagnostic.match(text): return Diagnostic(text) match = self.plan.match(text) if match: return", "\"\"\"Parse a matching result line into a result instance.\"\"\" return Result( ok, match.group('number'),", "\"\"\"A parser for TAP files and lines.\"\"\" # ok and not ok share", "ok share most of the same characteristics. result_base = r\"\"\" \\s* # Optional", "# Optional whitespace. (?P<number>\\d*) # Optional test number. \\s* # Optional whitespace. (?P<description>[^#]*)", "result_base = r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) # Optional test number. 
\\s*", "= int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to explicitly", "is an error to explicitly specify ' 'any version lower than 13.') return", "Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse a matching result line into a", "will yield each parsed line. The filename is assumed to exist. \"\"\" with", "(?P<reason>.*) # Optional reason. \"\"\", re.VERBOSE) version = re.compile(r'^TAP version (?P<version>\\d+)$') TAP_MINIMUM_DECLARED_VERSION =", "self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to explicitly specify ' 'any version lower", "= re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan", "share most of the same characteristics. result_base = r\"\"\" \\s* # Optional whitespace.", "\\s* # Optional whitespace. (?P<number>\\d*) # Optional test number. \\s* # Optional whitespace.", "characteristics. result_base = r\"\"\" \\s* # Optional whitespace. (?P<number>\\d*) # Optional test number.", "a line into whatever TAP category it belongs.\"\"\" match = self.ok.match(text) if match:", "def _parse_version(self, match): version = int(match.group('version')) if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is", "instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version = int(match.group('version'))", "raise ValueError('It is an error to explicitly specify ' 'any version lower than", "\"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base,", "# Only SKIP directives are allowed in the plan. if directive.text and not", "plan details. \\#? # Optional directive marker. \\s* # Optional whitespace. (?P<directive>.*) #", "line. The filename is assumed to exist. 
\"\"\" with open(filename, 'r') as tap_file:", "if version < self.TAP_MINIMUM_DECLARED_VERSION: raise ValueError('It is an error to explicitly specify '", "re.compile(r'^ok' + result_base, re.VERBOSE) not_ok = re.compile(r'^not\\ ok' + result_base, re.VERBOSE) plan =", "parsed line. The filename is assumed to exist. \"\"\" with open(filename, 'r') as", "result_base, re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details. [^#]* #", "directive) def _parse_result(self, ok, match): \"\"\"Parse a matching result line into a result", "int(match.group('expected')) directive = Directive(match.group('directive')) # Only SKIP directives are allowed in the plan.", "only # directives appear with the plan details. \\#? # Optional directive marker.", "[^#]* # Consume any non-hash character to confirm only # directives appear with", "directive = Directive(match.group('directive')) # Only SKIP directives are allowed in the plan. if", "return Bail(match.group('reason')) match = self.version.match(text) if match: return self._parse_version(match) return Unknown() def _parse_plan(self,", "result instance.\"\"\" return Result( ok, match.group('number'), match.group('description').strip(), Directive(match.group('directive'))) def _parse_version(self, match): version =", "# Optional directive text. \"\"\" ok = re.compile(r'^ok' + result_base, re.VERBOSE) not_ok =", "# directives appear with the plan details. \\#? # Optional directive marker. \\s*", "re.compile(r'^#') bail = re.compile(r\"\"\" ^Bail\\ out! \\s* # Optional whitespace. 
(?P<reason>.*) # Optional", "self.bail.match(text) if match: return Bail(match.group('reason')) match = self.version.match(text) if match: return self._parse_version(match) return", "return Unknown() return Plan(expected_tests, directive) def _parse_result(self, ok, match): \"\"\"Parse a matching result", "ok, match): \"\"\"Parse a matching result line into a result instance.\"\"\" return Result(", "line into whatever TAP category it belongs.\"\"\" match = self.ok.match(text) if match: return", "\\s* # Optional whitespace. (?P<description>[^#]*) # Optional description before #. \\#? # Optional", "re.VERBOSE) plan = re.compile(r\"\"\" ^1..(?P<expected>\\d+) # Match the plan details. [^#]* # Consume", "Optional description before #. \\#? # Optional directive marker. \\s* # Optional whitespace." ]
[ "for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v), k)) for k,", "i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v), k)) for k, v", "<filename>solutions/python3/811.py<gh_stars>10-100 class Solution: def subdomainVisits(self, cpdomains): counter = collections.Counter() for cpdomain in cpdomains:", "Solution: def subdomainVisits(self, cpdomains): counter = collections.Counter() for cpdomain in cpdomains: count, *domains", "def subdomainVisits(self, cpdomains): counter = collections.Counter() for cpdomain in cpdomains: count, *domains =", "cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v), k))", "subdomainVisits(self, cpdomains): counter = collections.Counter() for cpdomain in cpdomains: count, *domains = cpdomain.replace(\"", "count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return", "for cpdomain in cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)):", "in cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] +=", "= collections.Counter() for cpdomain in cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i", "= cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v),", "collections.Counter() for cpdomain in cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i in", "\",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v), k)) for", "counter = collections.Counter() for cpdomain in cpdomains: count, *domains = cpdomain.replace(\" 
\",\".\").split(\".\") for", "cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count)", "cpdomain in cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])]", "class Solution: def subdomainVisits(self, cpdomains): counter = collections.Counter() for cpdomain in cpdomains: count,", "*domains = cpdomain.replace(\" \",\".\").split(\".\") for i in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\"", "cpdomains): counter = collections.Counter() for cpdomain in cpdomains: count, *domains = cpdomain.replace(\" \",\".\").split(\".\")", "range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v), k)) for k, v in counter.items()]", "in range(len(domains)): counter[\".\".join(domains[i:])] += int(count) return [\" \".join((str(v), k)) for k, v in" ]
[ "data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype ==", "unidecode import unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é': 'e', 'É':", "\"\"\" Created on Tue Nov 17 23:03:32 2020 @author: quipo \"\"\" import pandas", "{col: {'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó':", "'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo =", "Tue Nov 17 23:03:32 2020 @author: quipo \"\"\" import pandas as pd import", "for index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index, value", "joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios)", "str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN", "pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO']", "{'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú':", 
"return {col: {'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o',", "# -*- coding: utf-8 -*- \"\"\" Created on Tue Nov 17 23:03:32 2020", "'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip()", "= pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE'])", "barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...')", "'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú':", "data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value", "barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\"", "= ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = 
joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios)", "'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER ==", "joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1)", "right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates()", "if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype ==", "coding: utf-8 -*- \"\"\" Created on Tue Nov 17 23:03:32 2020 @author: quipo", "= joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda", "2020 @author: quipo \"\"\" import pandas as pd import numpy as np import", "== 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 
joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO']", "= data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata", "#barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\"", "#data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2,", "as pd import numpy as np import re from unidecode import unidecode def", "= pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x)", "in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM',", "import re from unidecode import unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á':", "#print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER ==", "value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 
'SUBTIPO_BA',", "print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER']", "== \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda", "'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8')", "left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates()", "== 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar #dumies =", "data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x:", "barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False)", "data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) 
data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data =", "'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x:", "if(x.dtype == \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'],", "value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index, value in barrios_geo.items():", "barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}',", "f'{value}', 'ESTA']=1 \"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False)", "in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO==", "pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8')", "re from unidecode import unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é':", "barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA',", "quipo \"\"\" import pandas as pd import numpy as np import re from", "'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}}", "'ESTA']=1 print(barrios[barrios['ESTA']==0]) 
barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False)", "on Tue Nov 17 23:03:32 2020 @author: quipo \"\"\" import pandas as pd", "data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else", "else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x)", "data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] =", "for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID',", "== \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype == \"str\")", "np import re from unidecode import unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a',", "data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata =", "data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN ==", "['BARRIO']) 
joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0", "x: x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']]", "index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index, value in", "index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE',", "joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1)", "data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\")", "-*- coding: utf-8 -*- \"\"\" Created on Tue Nov 17 23:03:32 2020 @author:", "== f'{value}', 'ESTA']=1 \"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0])", "\"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on =", "data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = 
data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018]", "barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower()", "import numpy as np import re from unidecode import unidecode def diccionario_quitar_tildes(col): return", "pandas as pd import numpy as np import re from unidecode import unidecode", "'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1,", "1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar #dumies = pd.get_dummies(data_analizar.CLASE)", "#data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in", "'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar #dumies = pd.get_dummies(data_analizar.CLASE) 
joindata.to_csv('geoDataframe_temporal.csv',sep=';',encoding='utf8',index=False)", "unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i',", "x.str.strip() if(x.dtype == \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left',", "x: x.str.strip() if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip()", "-*- \"\"\" Created on Tue Nov 17 23:03:32 2020 @author: quipo \"\"\" import", "data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\")", "pd import numpy as np import re from unidecode import unidecode def diccionario_quitar_tildes(col):", "data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0,", "x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) print(data.columns)", "import pandas as pd import numpy as np import re from unidecode import", "'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype", "from unidecode import unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é': 'e',", "\"\"\" import pandas as pd import numpy as np import re from unidecode", "else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, 
how='left', left_on=['NOMBRE'], right_on = ['BARRIO'])", "= data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering =", "\"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x:", "import unidecode def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í':", "joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2", "diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó':", "\"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else", "'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype ==", "'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1", "utf-8 -*- \"\"\" Created on Tue Nov 17 23:03:32 2020 @author: quipo \"\"\"", "#data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for", "joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] 
barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE']", "def diccionario_quitar_tildes(col): return {col: {'á': 'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í':", "'a', 'Á': 'A','é': 'e', 'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u',", "23:03:32 2020 @author: quipo \"\"\" import pandas as pd import numpy as np", "'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x)", "data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) print(data.columns) data", "'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda", "data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data", "data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering", "'É': 'E','í': 'i', 'Í': 'I','ó': 'o', 'Ó': 'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo", "x: x.str.strip() if(x.dtype == \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering,", "barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) 
#data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER", "Nov 17 23:03:32 2020 @author: quipo \"\"\" import pandas as pd import numpy", "'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else", "joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x:", "numpy as np import re from unidecode import unidecode def diccionario_quitar_tildes(col): return {col:", "\"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index,", "x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'})", "data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER ==", "barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']]", "as np import re from unidecode import unidecode def diccionario_quitar_tildes(col): return {col: {'á':", "== 2, 
'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar #dumies = pd.get_dummies(data_analizar.CLASE) joindata.to_csv('geoDataframe_temporal.csv',sep=';',encoding='utf8',index=False) #data_completo.to_csv('datos_proscesados.csv', encoding='utf-8')", "x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip() if(x.dtype", "'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER", "x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering", "barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for", "x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items():", "data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() 
#barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1", "'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) data_analizar=data_completo", "joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar #dumies", "print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3", "else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering = data_clustering.apply(lambda x: x.str.strip()", "data = data.apply(lambda x: x.str.strip() if(x.dtype == \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1)", "=data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3)", "f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1) data=data[['OBJECTID', 'CODIGO','NOMBRE', 
'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns)", "'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER", "joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar #dumies = pd.get_dummies(data_analizar.CLASE) joindata.to_csv('geoDataframe_temporal.csv',sep=';',encoding='utf8',index=False) #data_completo.to_csv('datos_proscesados.csv',", "how='left', left_on=['NOMBRE'], right_on = ['BARRIO']) joindata=joindata.rename(columns={'cluster':'CLUSTER'}) joindata['CLUSTER'] = joindata['CLUSTER'].fillna(3) joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE']", "0, 'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1)", "if(x.dtype == \"str\") else x) data_analizar=data_completo data_analizar=data_analizar[data_analizar['PERIODO']<=2018] data_clustering = pd.read_excel(\"../../Andres/cluster/clustering.xlsx\") data_clustering=data_clustering[['BARRIO','cluster']] data_clustering =", "17 23:03:32 2020 @author: quipo \"\"\" import pandas as pd import numpy as", "barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) 
data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True)", "'O','ú': 'u', 'Ú': 'U'}} data_completo=pd.read_csv('../../datos_proscesados.csv',sep=',',encoding='utf8') data_completo = data_completo.apply(lambda x: x.str.strip() if(x.dtype == \"str\")", "@author: quipo \"\"\" import pandas as pd import numpy as np import re", "x.str.strip() if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda x: x.str.strip() if(x.dtype", "joindata=joindata.drop('BARRIO',axis=1) #print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x)))", "data.loc[data.NOMBRE_MIN == f'{value}', 'ESTA']=1 \"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1", "'CLUSTER_CORREGIDO']=0 joindata.loc[joindata.CLUSTER == 1, 'CLUSTER_CORREGIDO']=1 joindata.loc[joindata.CLUSTER == 2, 'CLUSTER_CORREGIDO']=2 joindata=joindata.drop('CLUSTER',axis=1) joindata['CLUSTER']=joindata['CLUSTER_CORREGIDO'] joindata=joindata.drop('CLUSTER_CORREGIDO',axis=1) #data_analizar", "'ESTA']=1 \"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN']", "print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'), regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index,", "= data_clustering.apply(lambda x: x.str.strip() 
if(x.dtype == \"str\") else x) data=pd.read_csv('geoDataframe_cluster.csv',sep=';',encoding='utf8') data = data.apply(lambda", "data=data[['OBJECTID', 'CODIGO','NOMBRE', 'SUBTIPO_BA', 'NOMBRE_COM', 'SHAPEAREA', 'SHAPELEN']] #print(data.columns) #data.to_csv('geoDataframe_funciona.csv',sep=',',encoding='utf8',index=False) joindata['CLUSTER_CORREGIDO']=3 joindata.loc[joindata.CLUSTER == 0, 'CLUSTER_CORREGIDO']=0", "\"\"\" for index, value in barrios_geo.items(): barrios.loc[barrios.BARRIO== f'{value}', 'ESTA']=1 print(barrios[barrios['ESTA']==0]) barrios.to_csv('barrios.csv',sep=';',encoding='utf8',index=False) data['NOMBRE']=data['NOMBRE_MIN'] data=data.drop('NOMBRE_MIN',axis=1)", "== \"str\") else x) print(data.columns) data =data.drop('CLUSTER',axis=1) joindata = pd.merge(data,data_clustering, how='left', left_on=['NOMBRE'], right_on", "#print(data['NOMBRE']) barrios=data_completo['BARRIO'] barrios_geo=data['NOMBRE'] barrios=barrios.drop_duplicates() barrios_geo=barrios_geo.drop_duplicates() barrios=pd.DataFrame(barrios) barrios['ESTA']=0 print(barrios) data['NOMBRE_MIN']=data['NOMBRE'] #data['NOMBRE_MIN']=data['NOMBRE_MIN'].apply(lambda x: str(unidecode(x))) data=data.replace(diccionario_quitar_tildes('NOMBRE_MIN'),", "regex=True) data['NOMBRE_MIN']=data['NOMBRE_MIN'].str.lower() #barrios.to_csv('barrios.csv',sep=',',encoding='utf8',index=False) #data.drop_duplicates(subset='...') \"\"\" for index, value in barrios.items(): data.loc[data.NOMBRE_MIN == f'{value}',", "Created on Tue Nov 17 23:03:32 2020 @author: quipo \"\"\" import pandas as" ]
[ "self.wiser_hub_ip = '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file =", "filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s' % json_file), 'r') as f: return f.read()", "os import unittest import logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171'", "<gh_stars>1-10 import os import unittest import logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip", "def load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s' % json_file), 'r') as", "load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s' % json_file), 'r') as f:", "#logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip)", "#self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s' %", "setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file", "BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token =", "\"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__)", "= \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 
'data/%s' % json_file),", "logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url =", "self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename", "= url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\"", "'192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file", "\"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s'", "import unittest import logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url", "unittest import logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url =", "= \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self,", "class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token", "= \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename,", "self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = 
\"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__) with", "json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s' % json_file), 'r') as f: return", "self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file =", "def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\"", "import os import unittest import logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip =", "import logging #logging.basicConfig(level=logging.INFO) class BaseTest(unittest.TestCase): def setUp(self): self.wiser_hub_ip = '192.168.1.171' self.base_url = url", "url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def", "= \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename =", "\"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file): filename = os.path.dirname(__file__) with open(os.path.join(filename, 'data/%s' % json_file), 'r')", "\"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\" #self.source_data_file = \"all-with-itrv-and-hotwater.json\" def load_from_file(self, json_file):", "= '192.168.1.171' self.base_url = url = \"http://{}/data/domain/\".format(self.wiser_hub_ip) self.token = \"<PASSWORD>\" self.source_data_file = \"all-with-itrv.json\"" ]
[ "str(html); pattern1 = '<div id=\"plist\".+? <div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 =", "1; for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl =", "#!/usr/local/bin/python3 #encoding:utf8 ''' 作用:爬取京东商城手机分类下的的所有手机商品的展示图片。 url:为需要爬取的网址 page:页数 ''' import re import urllib.request def getimage(url,", "getimage(url, page): html = urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div id=\"plist\".+? <div", "imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try:", "html = str(html); pattern1 = '<div id=\"plist\".+? <div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html);", "#encoding:utf8 ''' 作用:爬取京东商城手机分类下的的所有手机商品的展示图片。 url:为需要爬取的网址 page:页数 ''' import re import urllib.request def getimage(url, page):", "in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl =", "if hasattr(e, 'reason'): x+=1; x+=1; for i in range(1, 2): url = \"https://list.jd.com/list.html?cat=9987,653,655&page=\"", "x = 1; for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg';", "x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for i in range(1, 2): url =", "except urllib.error.URLError as e: if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1;", "hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for i in range(1, 2):", "''' import re import urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read(); html =", "rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist", "<div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; 
pattern2 = '<img width=\"220\"", "import urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read(); html = str(html); pattern1 =", "= re.compile(pattern2).findall(rst1); x = 1; for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3", "\"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename);", "imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e:", "class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 = '<img width=\"220\" height=\"220\"", "'<div id=\"plist\".+? <div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 =", "imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0];", "urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div id=\"plist\".+? 
<div class=\"page clearfix\">'; rst1 =", "width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1; for imageurl in imagelist:", ".+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1; for imageurl in imagelist: imagename =", "re.compile(pattern2).findall(rst1); x = 1; for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 =", "= re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if", "page:页数 ''' import re import urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read(); html", "clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg';", "try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if hasattr(e, 'code'): x+=1; if hasattr(e,", "= rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x =", "e: if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for i in", "height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1; for imageurl in imagelist: imagename", "urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div", "for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl);", "作用:爬取京东商城手机分类下的的所有手机商品的展示图片。 url:为需要爬取的网址 page:页数 ''' import re import urllib.request def getimage(url, page): html =", "re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if hasattr(e,", "'<img width=\"220\" height=\"220\" 
.+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1; for imageurl in", "imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if hasattr(e, 'code'):", "filename=imagename); except urllib.error.URLError as e: if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1;", "= \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl,", "page): html = urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div id=\"plist\".+? <div class=\"page", "re import urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read(); html = str(html); pattern1", "re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1);", "urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'):", "rst1 = rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x", "hasattr(e, 'reason'): x+=1; x+=1; for i in range(1, 2): url = \"https://list.jd.com/list.html?cat=9987,653,655&page=\" +", "= '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError", "= '<div id=\"plist\".+? 
<div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2", "= re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist =", "imagelist = re.compile(pattern2).findall(rst1); x = 1; for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\";", "urllib.error.URLError as e: if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for", "url:为需要爬取的网址 page:页数 ''' import re import urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read();", "'reason'): x+=1; x+=1; for i in range(1, 2): url = \"https://list.jd.com/list.html?cat=9987,653,655&page=\" + str(i);", "def getimage(url, page): html = urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div id=\"plist\".+?", "'//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as", "if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for i in range(1,", "''' 作用:爬取京东商城手机分类下的的所有手机商品的展示图片。 url:为需要爬取的网址 page:页数 ''' import re import urllib.request def getimage(url, page): html", "id=\"plist\".+? <div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0]; pattern2 = '<img", "= '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1; for imageurl", "pattern1 = '<div id=\"plist\".+? 
<div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1 = rst1[0];", "import re import urllib.request def getimage(url, page): html = urllib.request.urlopen(url).read(); html = str(html);", "as e: if hasattr(e, 'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for i", "x+=1; for i in range(1, 2): url = \"https://list.jd.com/list.html?cat=9987,653,655&page=\" + str(i); getimage(url, i);", "= urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div id=\"plist\".+? <div class=\"page clearfix\">'; rst1", "= \"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if hasattr(e, 'code'): x+=1;", "= str(html); pattern1 = '<div id=\"plist\".+? <div class=\"page clearfix\">'; rst1 = re.compile(pattern1).findall(html); rst1", "html = urllib.request.urlopen(url).read(); html = str(html); pattern1 = '<div id=\"plist\".+? <div class=\"page clearfix\">';", "rst1[0]; pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1;", "imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl", "pattern2 = '<img width=\"220\" height=\"220\" .+?//.+?\\.jpg'; imagelist = re.compile(pattern2).findall(rst1); x = 1; for", "= 1; for imageurl in imagelist: imagename = \"Desktop/jd/\"+str(page)+\"-\"+str(x)+\".jpg\"; pattern3 = '//.+?\\.jpg'; imageurl", "x+=1; x+=1; for i in range(1, 2): url = \"https://list.jd.com/list.html?cat=9987,653,655&page=\" + str(i); getimage(url,", "\"http:\"+imageurl[0]; try: urllib.request.urlretrieve(imageurl, filename=imagename); except urllib.error.URLError as e: if hasattr(e, 'code'): x+=1; if", "'code'): x+=1; if hasattr(e, 'reason'): x+=1; x+=1; for i in range(1, 2): url", "pattern3 = '//.+?\\.jpg'; imageurl = re.compile(pattern3).findall(imageurl); imageurl = \"http:\"+imageurl[0]; try: 
urllib.request.urlretrieve(imageurl, filename=imagename); except" ]
[ "= [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact,", "views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2,", "name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/', views.compare, name='compare'), path('generate/', views.generate, name='generate'),", "from django.urls import path from . import views urlpatterns = [ path(\"\", views.home,", "<gh_stars>1-10 from django.urls import path from . import views urlpatterns = [ path(\"\",", "path from . import views urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage,", "views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home,", "views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/', views.compare, name='compare'), path('generate/', views.generate,", "import views urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about,", "path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/', views.compare, name='compare'), path('generate/', views.generate, name='generate'), ]", "path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\",", "name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", 
views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"),", "views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/', views.compare,", "views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release,", "urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\",", "#path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\",", "path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/',", "import path from . import views urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\",", "views urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"),", "name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"),", "name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"),", "from . 
import views urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"),", "#path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\",", "name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"),", "path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"), path(\"home/\",", "path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/', views.compare, name='compare'), path('generate/',", ". import views urlpatterns = [ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\",", "name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name, name=\"test\"), path('compare/', views.compare, name='compare'),", "django.urls import path from . import views urlpatterns = [ path(\"\", views.home, name=\"home\"),", "[ path(\"\", views.home, name=\"home\"), #path(\"<name>\", views.mainPage, name=\"main\"), path(\"about/\", views.about, name=\"about\"), #path(\"contact/\", views.contact, name=\"contact\"),", "views.contact, name=\"contact\"), path(\"home/\", views.home, name=\"main\"), path(\"release/\", views.release, name=\"release\"), path(\"home2/\", views.home2, name=\"home2\"), path(\"test/\", views.get_name," ]
[ "find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK", "flow - card payment --------------------- # GOV.UK pages, accept cost to view register", "render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def", "return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with no mortgage details", "@app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- #", "-------------------------- # Change history - pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return render_template('changes-view/changes-1.0.html')", "render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register view", "- Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") #", 
"render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html',", "return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data", "data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens", "render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian 2nd", "# LAST OF THE ALPHA PROTOTYPES! 
# A \"citizen facing\" register concept #", "find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results", "transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def", "on GOV.UK and flows into register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\")", "page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', \"r\") data = json.load(json_data)", "1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1", "with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- #", "@app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) #", "Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def", "3 - experian sign in @app.route('/find-owner/d/experian-sign-in') def 
find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify", "render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants to find out ... something about", "# Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data", "# Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data)", "3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return", "sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub", "entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary')", "sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer -> conveyancer relationship --------------------------", "client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2 receives (all parties) confirmation", "create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return 
render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub", "property (IDA + payment) # starts on GOV.UK and flows into register view", "Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html',", "- show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0():", "flow... # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify - Sub", "GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\")", "and flows into register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK", "Client can now view the register if they want to. 
@app.route('/relationship-starts/client-view-register') def client_view_register_2_1():", "- transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes,", "@app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow Step", "-------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with GOV.UK", "register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes,", "{ 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def", "on GOV.UK and flows into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\")", "def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") #", "render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify - Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify():", "\"r\") data = json.load(json_data) return 
render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done page", "# Verify + Payment + real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html',", "@app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary():", "prototypes - 2nd conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json',", "def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example mortgage agreement --------------------------", "find out who owns a property (IDA + payment) # starts on GOV.UK", "--------------------------------------------------------------------------- # Transfer prototypes, login page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") #", "def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return", "__name__ == '__main__': # Bind to PORT if defined, otherwise default to 5000.", "json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states')", "PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)", "flows into register view # Verify + Payment + real fake title @app.route('/find-owner/d/search')", "return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') #", "Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2", "2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- #", "GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK", "3, prototype 1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html')", "return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return", "- Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step", "render_template('transfer-and-charge/citizen-1-login-2.0.html', 
next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0():", "# Transfer prototypes, done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes,", "json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes", "token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1 confirms", "def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 - experian sign", "and flows into register view @app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK", "return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid')", "def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes", "prototypes, done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing the", "you @app.route('/relationship-starts/client-who-verified-you') def 
relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow Step", "prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data =", "@app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', \"r\") data = json.load(json_data) return", "= json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes -", "# GOV.UK pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html')", "2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify", "a property # starts on GOV.UK and flows into register view @app.route('/find-owner/search') def", "agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement", "Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return", "@app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework", "historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return 
render_template('changes-view/changes-1.0.html') # Change history - historical only -", "8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated", "# --------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def", "def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment --------------------- # GOV.UK pages,", "find out who owns a property rouute c - (IDA) (real fake title)", "# --------------------------------------------------------------------------- # Page prototypes, Register Changes View -------------------------- # Change history -", "view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing -----------------", "pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages,", "GOV.UK verify - Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def", "login page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page", "render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who():", "GOV.UK and flows into register view @app.route('/find-owner/c/search') def find_owner_c_search(): 
return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") #", "def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK Verification ---------------------", "client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2():", "Transfer prototypes - 2nd conveyancer, Step 4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready')", "# Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer", "@app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer", "transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer", "data=data) # Transfer prototypes, summary with no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage():", "email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\")", "who verified you 
@app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub", "render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html')", "Step 2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\")", "client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow Step 2 - who", "# Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def", "3 - experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK", "prototypes, login page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list", "Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 -", "render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn')", 
"def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return", "render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def", "experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify -", "# Transfer prototypes, summary with no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json',", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") #", "confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1 receives", "2 visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 -", "confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2 receives", "render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return 
render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer ->", "into register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results", "render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return", "prototype 1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login')", "def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow Step 3", "Sprint 2, prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html')", "def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list():", "next_page=\"/find-owner/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\")", "mortgage details entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary", "----------------- @app.route('/transfer-and-charge/citizen-1-start') 
def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with", "relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- # Step", "next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") #", "return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\")", "--------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES! 
# A \"citizen facing\" register concept", "hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- #", "\"legal copy\" then this page can be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1():", "def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage", "Sub flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0():", "next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\")", "path @app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def", "Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html',", "return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html')", "user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 
-", "# GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in')", "# Change history - historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return", "def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view register -----------------", "return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\")", "--------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return", "next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return", "render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 - experian 2nd phase", "return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor')", "render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def", "# GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', 
next_page=\"/find-owner/property-details-2.0\") #", "on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes,", "data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer that", "Transaction flows, relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html')", "history - historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') #", "sign transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") #", "3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4", "prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data)", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') #", "transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') 
def transfer_mortgage_details_entered():", "2 - who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK", "prototypes - 2nd conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\")", "prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes,", "# scenario: user wants to find out who owns a property # starts", "next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0') def", "property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages,", "5 - Client can now view the register if they want to. 
@app.route('/relationship-starts/client-view-register')", "transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer, Step 2 -", "verify - Sub flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-login')", "next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth():", "verify - Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") #", "user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html',", "proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list", "conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with no mortgage details page @app.route('/transfer/summary-no-mortgage') def", "# end Sub flow - GOV.UK Verification --------------------- # Step 8 - Client", "details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow -", "property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return 
render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario:", "GOV.UK and flows into register view # Verify + Payment + real fake", "@app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you') def", "1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def", "(IDA) (real fake title) # starts on GOV.UK and flows into register view", "def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow Step 2 -", "view. V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") #", "def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK Verification ---------------------", "--------------------------------------------------------------------------- # scenario: user wants to find out who owns a property #", "def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify - Step 1", "@app.route('/hack-day') def hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list():", "sign in 
@app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub", "for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__ == '__main__': #", "Step 4 - experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html',", "straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def", "data=data, role=\"buyer\") # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json',", "@app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer - buyer", "find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def", "transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer", "set the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def 
conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6", "@app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def", "Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step", "# --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data)", "prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage", "- GOV.UK Verification --------------------- # Sub flow - card payment --------------------- # GOV.UK", "flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify -", "@app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card payment", "return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') #", "Client 1 signs mortgage deed 
@app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\")", "def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- #", "Verify --------------------- # verify - Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\")", "return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK Verification --------------------- # GOV.UK", "find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants to find out ...", "Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub", "def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login page @app.route('/transfer/login')", "page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # 
Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list')", "Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\")", "= json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new provisions page @app.route('/transfer/new-provisions')", "# Transfer prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\")", "page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data,", "flows, citizens sign transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html',", "Deed - reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login')", "you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow Step", "def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') 
def register_test_title(): return", "render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint", "flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow", "Step 2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') #", "render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def", "(Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html')", "Sprint 3, Execute Deed - reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start():", "# Step 2 - find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') #", "return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # 
--------------------------------------------------------------------------- # LAST", "GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\")", "return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html')", "deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1", "\"r\") data = json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary", "json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- #", "3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\")", "Sub flow - GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 -----------------", "def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who():", "- Sub flow Step 4 - experian 2nd phase sign in 
@app.route('/relationship-starts/client-2-experian-sign-in-part-2') def", "sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html')", "- confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token", "GOV.UK pages, accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html',", "- Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\")", "@app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register Changes View", "return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html',", "--------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v2.0 ----------------- 
@app.route('/transfer-and-charge/citizen-1-start') def", "def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def", "@app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow Step", "GOV.UK Verification --------------------- # Step 2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token') def", "c - (IDA) (real fake title) # starts on GOV.UK and flows into", "def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint", "next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK Verification --------------------- # GOV.UK pages, property", "--------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return", "- Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step", "= True # govuk_template asset path @app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/',", "message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1", 
"confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False,", "----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants", "withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data)", "# --------------------------------------------------------------------------- # Alternate Register view. V4 with help on show @app.route('/register-view/register-view-4-help-text') def", "- Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return", "def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer - buyer relationship", "results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property", "@app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3,", "render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', 
next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0():", "- pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return render_template('changes-view/changes-1.0.html') # Change history -", "#casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', \"r\") data", "json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a - external process step", "(real fake title) # starts on GOV.UK and flows into register view @app.route('/find-owner/c/search')", "@app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment():", "IDA/Credit Card/login stuff ----------------- # Step 1 - login with GOV.UK Verify -", "conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data =", "def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer", "render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data =", "# verify - Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return 
render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify", "- experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") #", "client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def", "@app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3,", "data=data, role=\"seller\") # Transfer prototypes - 2nd conveyancer, Step 4 - transfer ready", "be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- #", "return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title')", "find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment --------------------- # GOV.UK pages, property", "Change history - pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return 
render_template('changes-view/changes-1.0.html') # Change", "confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts,", "--------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token():", "# GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") #", "pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub", "sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow -", "deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1", "factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client", "property # starts on GOV.UK and flows into register view @app.route('/find-owner/search') def 
find_owner_search():", "- GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/b/verify')", "--------------------------------------------------------------------------- # scenario: user wants to find out who owns a property rouute", "conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data)", "- conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data)", "1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4", "return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0():", "render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return", "@app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step", "find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view register 
----------------- @app.route('/find-owner/b/card-payment')", "states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data)", "transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1", "os import json from flask import Flask, render_template from flask.ext.assets import Environment app", "GOV.UK verify - Sub flow Step 1 - for conveyancer create relationship flow", "the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 -", "- semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows,", "def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register Changes View --------------------------", "# starts on GOV.UK and flows into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return", "listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details", 
"@app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1 signs", "return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian", "3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK", "2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub", "in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK", "Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- #", "1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow", "# # If we're having to download a \"legal copy\" then this page", "# GOV.UK verify - Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you')", "# Bind to PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT',", "details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True,", "flows, citizens sign transfer and charge v3 ----------------- # Step 1a - external", "def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct property @app.route('/relationship-starts/conveyancer-find-property')", "Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 -", "return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0():", "into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results", "@app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm')", "# Sub flow - GOV.UK Verification --------------------- # GOV.UK verify - Sub flow", "2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing')", 
"render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian sign", "editable=False, data=data, role=\"seller\") # Transfer prototypes - 2nd conveyancer, Step 4 - transfer", "@app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered')", "home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return", "return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html')", "= json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def", "return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return", "GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # ---------------------------------------------------------------------------", "experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end", "transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return 
render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer", "who owns a property rouute c - (IDA) (real fake title) # starts", "3 - results and select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') #", "@app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference():", "json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary with option to withdraw", "pay to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end", "json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd conveyancer, Step 3 -", "transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data)", "signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client", "4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return 
render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer", "find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK Verification --------------------- #", "starts on GOV.UK and flows into register view # Verify + Payment +", "# Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html',", "mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html',", "register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html')", "@app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK Verification ---------------------", "# GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html',", "return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s) confirm v2.2 --------", "if __name__ == '__main__': # Bind to PORT if defined, otherwise default to", "history - pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return 
render_template('changes-view/changes-1.0.html') # Change history", "= json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer prototypes, signing the transfer", "render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants to find out who owns", "with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 -", "login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2", "return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer, Step 2 - conveyancer-case-list", "transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\",", "@app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details", "----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0 -----------------", "starts on GOV.UK and flows into register view @app.route('/find-owner/c/search') def find_owner_c_search(): return 
render_template('user-find-owner/search.html',", "def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow - card payment ---------------------", "\"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer", "data=data) # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json',", "- Sub flow Step 2 - who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return", "GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def", "\"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer page", "# starts on GOV.UK and flows into register view @app.route('/find-owner/c/search') def find_owner_c_search(): return", "- Sub flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def", "use sub flow... 
# Sub flow - GOV.UK Verification --------------------- # GOV.UK verify", "ABR + '.json', \"r\") data = json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # ---------------------------------------------------------------------------", "flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return", "flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify -", "# Page prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') #", "return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants to find out who", "3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return", "changes_1_0(): return render_template('changes-view/changes-1.0.html') # Change history - historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0')", "@app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm')", "# Transfer prototypes, transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\")", "if they 
want to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 -", "v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login", "render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return", "results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property", "def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2():", "- historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # ---------------------------------------------------------------------------", "4 - experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\")", "def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def", "2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', 
next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow", "buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code():", "3 - Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data", "def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer -> conveyancer relationship", "--------------------------------------------------------------------------- # Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1')", "conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login():", "@app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer", "json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer():", "GOV.UK verify - Sub flow Step 2 - who verified you @app.route('/find-owner/who-verified-you') def", "view 
@app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing -----------------", "- external process step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return", "register view @app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing", "verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow", "# Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data)", "transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed", "sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html')", "# --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- #", "Transaction flows, citizens sign transfer and charge v3 ----------------- # Step 1a -", "- experian 2nd phase sign in 
@app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") #", "only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page", "flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub flow Step", "# Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login')", "help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer", "sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html')", "--------------------------------------------------------------------------- # Alternate Register view. 
V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded():", "experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub", "- 2nd conveyancer, Step 4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready():", "flow Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') #", "def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff ----------------- #", "- confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html',", "render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register Changes View -------------------------- # Change history", "render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # ---------------------------------------------------------------------------", "render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with GOV.UK 
Verify - use sub flow...", "1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3", "Step 8 - Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') #", "return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\")", "Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') #", "2nd conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data =", "render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html')", "relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- # Step", "prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', 
\"r\") data = json.load(json_data) return", "flask import Flask, render_template from flask.ext.assets import Environment app = Flask(__name__) app.debug =", "-------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def", "@app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data)", "# --------------------------------------------------------------------------- # Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html')", "return render_template('transfer/done.html') # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json',", "Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK", "transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes,", "sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') 
@app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html')", "--------------------------------------------------------------------------- # GOV.UK pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return", "verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub", "find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def", "verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") #", "THE ALPHA PROTOTYPES! 
# A \"citizen facing\" register concept # # If we're", "render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms')", "import Flask, render_template from flask.ext.assets import Environment app = Flask(__name__) app.debug = True", "def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype", "# A \"citizen facing\" register concept # # If we're having to download", "pages, accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\")", "citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page", "# verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify", "data=data) # Transfer prototypes, done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer", "@app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 - experian", "you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): 
return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow", "----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example", "- results and select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step", "OF THE ALPHA PROTOTYPES! # A \"citizen facing\" register concept # # If", "prototypes, Register Changes View -------------------------- # Change history - pending and historical @app.route('/changes-view/changes-1.0')", "agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze", "title) # starts on GOV.UK and flows into register view @app.route('/find-owner/c/search') def find_owner_c_search():", "- Sub flow Step 3 - experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return", "return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data", "create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify -", "Page prototypes, Example transfer agreement 
-------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # ---------------------------------------------------------------------------", "(IDA + payment) # starts on GOV.UK and flows into register view @app.route('/find-owner/b/search')", "transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step", "# --------------------------------------------------------------------------- # scenario: user wants to find out ... something about a", "Step 4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\")", "return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed():", "client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2 visits start page @app.route('/relationship-starts/client-2-start')", "2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client", "Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify", "----------------- @app.route('/find-owner/property-details-2.0') def 
find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\")", "sub flow... # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify -", "app = Flask(__name__) app.debug = True # govuk_template asset path @app.context_processor def asset_path_context_processor():", "# GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') #", "def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def", "sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has been", "render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def", "page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with GOV.UK", "next_page=\"/find-owner/d/register-view\") # end card payment --------------------- # GOV.UK pages, property details v2.0 -----------------", "10 - Client 2 receives (all parties) confirmation 
@app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html')", "Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html')", "render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view. V4 with sections fully open @app.route('/register-view/register-view-4-expanded')", "return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd conveyancer, Step 3 - confirm", "- Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9", "render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def", "next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view. 
V4 with help on", "editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def", "@app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor():", "Step 10 - Client 2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return", "def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page", "def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def", "def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") #", "transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- #", "def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2 visits start page", "return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') 
def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return", "return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view. V4 with", "return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') #", "def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1 2 factor", "Sub flow - GOV.UK Verification --------------------- # Step 8 - Client 2 enters", "wants to find out who owns a property # starts on GOV.UK and", "transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge", "render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference():", "def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return", "return 
render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES! # A", "@app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def", "editable=False, data=data, role=\"buyer\") # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller():", "GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK", "3, Execute Deed - reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return", "@app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2,", "govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0():", "render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment():", "sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow -", "confirmation 
@app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token @app.route('/relationship-starts/conveyancer-token')", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\")", "sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html')", "Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 -", "def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page", "task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set the number", "def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and select correct property", "# verify - Step 3 - experian sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return", "data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def 
hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def", "@app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task')", "-------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement --------------------------", "- (IDA) (real fake title) # starts on GOV.UK and flows into register", "next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return", "data=data) # Transfer prototypes - 2nd conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer')", "register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to", "v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1", "return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register", "register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') 
@app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html')", "def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") #", "# GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/experian-sign-in')", "def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants to find out", "'.json', \"r\") data = json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record')", "render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed - reworked", "verify - Sub flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login')", "next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with", "# Transaction flows, relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return", "return 
render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return", "render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return", "@app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed - reworked from", "Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1", "Step 4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') #", "provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page", "@app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1 signs", "return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return", "find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card payment --------------------- #", 
"reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4,", "render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow - card payment --------------------- # --------------------------------------------------------------------------- #", "flow Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') #", "v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with", "json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes,", "+ real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages,", "1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') #", "def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return", 
"--------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK", "flows into register view @app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages,", "Step 3 - experian sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") #", "end sub flow - card payment --------------------- # --------------------------------------------------------------------------- # scenario: user wants", "Sub flow - GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step", "render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return", "# Step 6 - Client 2 visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return", "relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow Step 3 - experian", "Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3", "v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # 
Step 1 - log in", "flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return", "#casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html',", "return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with GOV.UK Verify - use sub", "signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 -", "select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set", "def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return", "@app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1')", "render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') 
def", "verify - Sub flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") #", "- experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify", "Payment + real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK", "charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 -", "details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user", "data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer page @app.route('/transfer/create-transfer')", "render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def", "editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with no mortgage details page @app.route('/transfer/summary-no-mortgage')", 
"----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__ == '__main__': # Bind to", "@app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct property", "transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token')", "- Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3", "# Transaction flows, citizens sign transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0():", "return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v3", "card payment --------------------- # GOV.UK pages, accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost')", "sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1,", "@app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') 
@app.route('/sprint-4/citizen-register') def sprint_4_citizen_register():", "2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow", "- Sub flow Step 1 - for conveyancer create relationship flow @app.route('/find-owner/verify') def", "Transfer prototypes - 2nd conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login():", "# Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9", "return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify - Step 1 @app.route('/find-owner/d/verify') def", "def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow --------------------------", "\"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes -", "flows, relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') #", "return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') #", "@app.route('/sprint-3/conveyancer-login') def 
sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers():", "@app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow Step", "v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user", "3a - external process step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0():", "page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details')", "with GOV.UK Verify - use sub flow... # Sub flow - GOV.UK Verification", "register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view. 
V4", "sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK", "render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer -", "data = json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record():", "property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify ---------------------", "def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can now view the", "list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework", "# Step 5 - Client can now view the register if they want", "@app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def", "return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return 
render_template('relationship-starts/conveyancer-confirm-2.2.html') #", "return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card payment --------------------- # pay", "that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return", "-------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze register)", "return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- # Step 8", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") #", "conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2():", "Verification --------------------- # Step 2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1():", "-> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return", "return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 - experian 2nd", "govuk_template asset 
path @app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' }", "scenario: user wants to find out who owns a property rouute c -", "\"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary with", "editable=True, data=data) # Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html')", "Alternate Register view. V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html',", "@app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment --------------------- # GOV.UK", "= json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' +", "= json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a - external process", "pages - for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__ ==", "@app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow Step 3", "# Transaction flows, citizens sign transfer and charge v3 ----------------- # Step 1a", "\"r\") data = json.load(json_data) return 
render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def", "= json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary with empty states", "@app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view. V4 with", "return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants to find out who owns", "Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub", "@app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages", "details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit", "deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data,", "a property (IDA + payment) # starts on GOV.UK and flows 
into register", "token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2 confirms", "GOV.UK pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') #", "render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify -", "def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto():", "render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def", "return render_template('changes-view/changes-1.0.html') # Change history - historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def", "# --------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def", "render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def", "Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def 
conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return", "property # starts on GOV.UK and flows into register view # Verify +", "render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") #", "flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return", "much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # -----------------", "- experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify", "return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register Changes View -------------------------- # Change", "def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0')", "Step 6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step", "v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return 
render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0", "@app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0')", "end Sub flow - GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0", "correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") #", "# --------------------------------------------------------------------------- # Page prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return", "client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with GOV.UK Verify - use", "return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home(): return render_template('index.html') @app.errorhandler(404)", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") #", "----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1 -----------------", "\"citizen facing\" register concept # # If we're having to download a \"legal", "find_owner_search(): return 
render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results():", "--------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return", "- experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end", "page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data)", "# Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return", "- Sub flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def", "@app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs", "card payment --------------------- # --------------------------------------------------------------------------- # scenario: user wants to find out who", "render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0')", "return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def", "return 
render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer", "sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed", "create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub", "PROTOTYPES! # A \"citizen facing\" register concept # # If we're having to", "register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing", "Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html')", "page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing the transfer page", "Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example", "verify - Sub flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2')", "# Step 6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') #", 
"render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF", "-------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log in @app.route('/relationship-starts/login')", "return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete')", "return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0():", "next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian sign in", "@app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data)", "sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html')", "def 
relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 -", "render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants to find out who owns a", "@app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0 -----------------", "= json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens sign", "verify - Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1():", "- experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end", "def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") #", "signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return", "been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True,", "user wants to find out ... 
something about a property # starts on", "def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow Step 3 -", "--------------------- # Step 2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return", "--------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login')", "Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html')", "details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property", "render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html')", "@app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs", "owns a property (IDA + payment) # starts on GOV.UK and flows into", "# --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view. 
V4 with help on show", "details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user", "- login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step", "to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2 visits", "4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login():", "find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def", "return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and select correct property @app.route('/relationship-starts/conveyancer-select-property') def", "pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages,", "def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed')", "return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub 
flow Step 2 - who verified", "render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login", "# Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html',", "--------------------------------------------------------------------------- # Alternate Register view. V4 with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text():", "flask.ext.assets import Environment app = Flask(__name__) app.debug = True # govuk_template asset path", "in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow", "return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return", "details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- #", "9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction", "render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # 
--------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v3 -----------------", "----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment --------------------- #", "2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def", "new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details", "@app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4", "flow - GOV.UK Verification --------------------- # Step 8 - Client 2 enters token", "sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html')", "# GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") #", "find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results():", "- 
2nd conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\")", "'/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return", "view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay", "@app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification ---------------------", "Step 3 - experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") #", "Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html')", "render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return", "@app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with GOV.UK Verify", "find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") 
# GOV.UK verify - Sub flow Step 4 -", "results and select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4", "Sub flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify", "Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1():", "@app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__ == '__main__': # Bind to PORT", "pages, IDA/Credit Card/login stuff ----------------- # Step 1 - login with GOV.UK Verify", "----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow - card", "authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1", "Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data =", "def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing a", "@app.route('/find-owner/changes-view') def find_owner_historian_view(): return 
render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants to", "def example_1(): return render_template('examples/example-page.html') if __name__ == '__main__': # Bind to PORT if", "- for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK", "Sub flow - card payment --------------------- # GOV.UK pages, accept cost to view", "v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants", "Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub", "@app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters", "flows into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages,", "Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') #", "prototypes, summary 
with no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data", "def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select')", "# Step 3 - Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json',", "token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1", "@app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes, Register View --------------------------", "# Step 3a - external process step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms')", "transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data)", "# end Sub flow - GOV.UK Verification --------------------- # GOV.UK pages, property details", "on GOV.UK and flows into register view @app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\")", "view. 
V4 with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") #", "- Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def", "find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 -", "sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2,", "create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes,", "True # govuk_template asset path @app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path':", "transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return", "the register if they want to. 
@app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step", "2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") #", "# end Verify --------------------- # card payment --------------------- # pay to view register", "GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/experian-sign-in') def", "Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 -", "def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute", "7 - login with GOV.UK Verify - use sub flow... # Sub flow", "--------------------------------------------------------------------------- # Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm')", "end Sub flow - GOV.UK Verification --------------------- # Sub flow - card payment", "want to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2", "1 - login with GOV.UK Verify - use sub flow... 
# Sub flow", "transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1", "of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st", "v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify", "render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\")", "return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes - 2nd conveyancer, Step 4", "# Step 4 - Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return", "flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return", "--------------------------------------------------------------------------- # Page prototypes, Register Changes View -------------------------- # Change history - pending", "return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return 
render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token')", "2nd conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\")", "transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token')", "# --------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return", "sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- #", "1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you')", "next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian 2nd phase", "Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history():", "hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\")", "return 
render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 - experian sign in @app.route('/find-owner/d/experian-sign-in')", "Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") #", "role=\"citizen\") # Step 3a - external process step - show user sms message", "sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow", "json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR", "----------------- # Step 1 - login with GOV.UK Verify - use sub flow...", "Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3", "import json from flask import Flask, render_template from flask.ext.assets import Environment app =", "data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes", "@app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2')", "# Step 2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def 
transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html',", "changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1')", "app.debug = True # govuk_template asset path @app.context_processor def asset_path_context_processor(): return { 'asset_path':", "transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation", "Transfer prototypes, done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing", "def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def", "listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details", "V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # ---------------------------------------------------------------------------", "render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register Changes", "def find_owner_verify_who(): return 
render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow Step 3", "@app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change():", "register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow -", "verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") #", "sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub", "render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return", "- Sub flow Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return", "transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1 signs transfer deed", "hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return 
render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2():", "2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step", "render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def", "Sub flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1():", "page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data,", "conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set the number of clients @app.route('/relationship-starts/conveyancer-add-clients')", "find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants to find out", "open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate", "render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example 
mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1():", "# Step 2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html',", "signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return", "# GOV.UK pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') #", "to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card", "render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian sign", "next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian sign in", "Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data)", "return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment --------------------- # GOV.UK pages, property details", "render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2, prototype 1:", "out who owns a property # starts on GOV.UK and flows into 
register", "# Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def", "@app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view register", "2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub", "sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html')", "next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html',", "register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed')", "return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') 
@app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register')", "# Step 8 - Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html')", "pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages,", "to find out ... something about a property # starts on GOV.UK and", "verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1():", "# Step 4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html',", "verify - Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1():", "find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK", "role=\"buyer\") # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\")", "1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") #", "return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow Step 3 - 
experian sign", "def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0')", "@app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2():", "Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step", "render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def", "# Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') #", "verify - Sub flow Step 2 - who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who():", "Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK", "v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details", "@app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return 
render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step", "# Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') #", "return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search / start v2.0 -----------------", "v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view.", "phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow", "@app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow Step", "1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2", "1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return", "data=data) # Transfer prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json',", "@app.route('/find-owner/d/search') def 
find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results')", "def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return", "json_data=open('app/static/data/' + ABR + '.json', \"r\") data = json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases')", "pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return render_template('changes-view/changes-1.0.html') # Change history - historical", "view the register if they want to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') #", "end Verify --------------------- # card payment --------------------- # pay to view register -----------------", "sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def", "# Alternate Register view. 
V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return", "sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4", "--------------------- # verify - Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") #", "pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages,", "owns a property rouute c - (IDA) (real fake title) # starts on", "editable=True, data=data) # Transfer prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option():", "render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return", "# scenario: user wants to find out who owns a property rouute c", "\"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer prototypes, signing", "return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login page @app.route('/transfer/login') def transfer_login():", "find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 - experian 2nd phase", "def sprint_4_citizen_register(): return 
render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1')", "next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0():", "find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results():", "render_template('transfer/done.html') # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\")", "Step 1a - external process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0():", "GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1 - for", "def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return", "Sub flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1():", "transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes -", "details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return 
render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario:", "render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html')", "owns a property # starts on GOV.UK and flows into register view @app.route('/find-owner/search')", "render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify -", "sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html')", "find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK Verification --------------------- #", "page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\",", "Flask, render_template from flask.ext.assets import Environment app = Flask(__name__) app.debug = True #", "end card payment --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def", "# Example pages - for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if", "def 
transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage", "flow - GOV.UK Verification --------------------- # Step 2 - Client 1 enters token", "# Step 10 - Client 2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2():", "\"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd conveyancer,", "find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff ----------------- # Step", "property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and select", "Sub flow Step 1 - for conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify():", "----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify -", "transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer", "phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow -", "def find_owner_c_register_view(): return 
render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants to find out", "2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm')", "role=\"seller\") # Transfer prototypes - 2nd conveyancer, Step 4 - transfer ready to", "- select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 -", "def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e):", "@app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0')", "phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow", "GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/b/verify') def", "phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow -", "import os import json from flask import Flask, render_template from flask.ext.assets import Environment", "@app.route('/relationship-starts/client-2-experian-sign-in-part-2') def 
relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification ---------------------", "json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes - 2nd", "def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0')", "json from flask import Flask, render_template from flask.ext.assets import Environment app = Flask(__name__)", "transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes,", "def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed - reworked from sprint", "flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return", "@app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing", "Sub flow Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html',", "details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK 
pages, property details", "render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start')", "- Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return", "render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow Step 2 - who verified", "def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token", "for conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK", "# GOV.UK verify - Sub flow Step 2 - who verified you @app.route('/find-owner/who-verified-you')", "page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer,", "# Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') #", "return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2():", "flows into register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages,", 
"render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def", "GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/c/verify') def", "- who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify", "and charge v3 ----------------- # Step 1a - external process step - show", "@app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete():", "Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") #", "- experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") #", "historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- #", "GOV.UK pages, IDA/Credit Card/login stuff ----------------- # Step 1 - login with GOV.UK", "return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def 
sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token')", "def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike", "- nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes,", "def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return", "@app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow Step 3", "return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def", "find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return", "Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return 
render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- #", "json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new", "the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html',", "page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data,", "= json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary with option to", "@app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token():", "Step 4 - experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html',", "# --------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def", "def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub flow Step 2 -", "GOV.UK pages, property 
details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # ---------------------------------------------------------------------------", "to view register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub", "render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') #", "with no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data)", "1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 -", "json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes,", "next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian sign in", "flow - GOV.UK Verification --------------------- # Sub flow - card payment --------------------- #", "# Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data =", "render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list')", "view register 
----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow", "can now view the register if they want to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return", "@app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK Verification", "transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1 2 factor authentication", "- Client 2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') #", "# GOV.UK verify - Sub flow Step 1 - for conveyancer create relationship", "return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow Step 3 - experian sign", "@app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- #", "select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select", "- Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): 
json_data=open('app/static/data/transfer-signing-data.json', \"r\") data =", "data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary with empty", "@app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\")", "facing\" register concept # # If we're having to download a \"legal copy\"", "client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub flow Step 2 - who", "return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 - experian 2nd", "page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\",", "+ '.json', \"r\") data = json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday", "transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation", "return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer')", "@app.route('/legal-documents/transfer-agreement-v1') def 
transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze register) ---------------------------------------", "--------------------------------------------------------------------------- # Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history')", "parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship", "- experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") #", "return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return", "render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who():", "flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm')", "Step 3 - Client 1 signs transfer deed 
@app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\")", "----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search", "return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff ----------------- # Step 1", "next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\")", "render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES! # A \"citizen", "next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian 2nd phase", "Verify - use sub flow... 
# Sub flow - GOV.UK Verification --------------------- #", "prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def", "sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html')", "sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3,", "next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow Step 2 - who verified you", "Card/login stuff ----------------- # Step 1 - login with GOV.UK Verify - use", "- Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") #", "property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\")", "Step 5 - Client can now view the 
register if they want to.", "transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer initiates v2.2", "3, prototype 1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html')", "sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html')", "# Step 5 - set the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return", "data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', \"r\")", "view # Verify + Payment + real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return", "--------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html')", "sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def", "who owns a property (IDA + payment) # starts on GOV.UK and flows", "def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', 
\"r\") data = json.load(json_data) return render_template('casework/case-details.html',", "- login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes -", "# GOV.UK pages, pay to view register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html',", "render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html',", "@app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF THE ALPHA", "return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return", "sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html')", "- experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify -", "# Step 7 - login with GOV.UK Verify - use sub flow... 
#", "1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction", "client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can now view the register", "Change history - historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html')", "-------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def", "\"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method')", "--------------------- # Sub flow - card payment --------------------- # GOV.UK pages, accept cost", "prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data)", "1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow", "mortgage agreement -------------------------- 
@app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer", "- set the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step", "render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\")", "def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0')", "initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log", "starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1", "user wants to find out who owns a property # starts on GOV.UK", "render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def", "@app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return 
render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set the number of", "def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer", "from flask.ext.assets import Environment app = Flask(__name__) app.debug = True # govuk_template asset", "flow - GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view')", "transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login')", "= json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done page @app.route('/transfer/done') def", "return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json',", "return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR +", "verify - Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify -", "--------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2():", "next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify - Step 1 @app.route('/find-owner/d/verify') def 
find_owner_d_verify(): return", "Sprint 2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete')", "--------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0():", "Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve", "sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed - reworked from sprint 2", "Transfer prototypes, summary with no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\")", "from flask import Flask, render_template from flask.ext.assets import Environment app = Flask(__name__) app.debug", "return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with GOV.UK Verify - use sub", "and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return render_template('changes-view/changes-1.0.html') # Change history - historical only", "property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario:", "2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return 
render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify", "flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify -", "= Flask(__name__) app.debug = True # govuk_template asset path @app.context_processor def asset_path_context_processor(): return", "and flows into register view # Verify + Payment + real fake title", "@app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/results')", "def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow Step 2", "@app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search /", "pages, pay to view register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") #", "def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def", "Verification --------------------- # GOV.UK verify - Sub flow Step 1 - for conveyancer", "1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 -", "# Page prototypes, Register Changes View -------------------------- # 
Change history - pending and", "listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details", "- Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK", "transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\")", "results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property", "empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True,", "- for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__ == '__main__':", "3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\")", "conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) #", "register if they want 
to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6", "= json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes - 2nd conveyancer,", "edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html')", "find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow Step 2 -", "v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK", "next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian 2nd phase", "signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0(): json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html',", "@app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow Step", "render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html',", "render_template('sprint-3/deed/buyer-3-signing-complete.html') # 
--------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token')", "render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html',", "find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results():", "generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship", "return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view():", "def changes_1_0(): return render_template('changes-view/changes-1.0.html') # Change history - historical only - nothing pending", "Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1", "Step 1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step", "return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian", "@app.route('/find-owner/results') def 
find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0 -----------------", "json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer", "# Transaction flows, relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return", "# Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return", "stuff ----------------- # Step 1 - login with GOV.UK Verify - use sub", "@app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow - card payment", "def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint", "1 - for conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\")", "return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute", "prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # 
Transfer prototypes, mortgage", "Transfer prototypes, login page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes,", "sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html')", "render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge", "having to download a \"legal copy\" then this page can be much more", "1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') #", "def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view')", "register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html')", "external process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\")", "# Transfer prototypes - 2nd conveyancer, Step 4 - transfer ready 
to sign", "return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return", "json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd", "@app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has been withdrawn", "Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html')", "return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select():", "#hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\")", "@app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer, Step", "receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # 
Transaction", "render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian 2nd", "next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF THE", "GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client", "@app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title():", "flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify -", "render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment --------------------- # GOV.UK pages, property details v2.0", "create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html',", "- Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return", "Step 4 - experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html',", 
"return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 -", "def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card payment ---------------------", "----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK Verification", "- login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step", "@app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK", "to download a \"legal copy\" then this page can be much more straightforward", "# GOV.UK verify - Sub flow Step 4 - experian 2nd phase sign", "2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow", "render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html')", "data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") 
# Transfer prototypes, signing the", "Register view. V4 with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\")", "# GOV.UK verify - Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you')", "next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return", "start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with", "return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option')", "out ... something about a property # starts on GOV.UK and flows into", "number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add", "semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, citizens", "flow Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') #", "reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return 
render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html')", "9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10", "return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view. V4 with sections fully open", "data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done page @app.route('/transfer/done')", "out who owns a property rouute c - (IDA) (real fake title) #", "- card payment --------------------- # GOV.UK pages, accept cost to view register -----------------", "listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details", "in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow", "copy\" then this page can be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return", "def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def", "- find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 -", "a 
\"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html')", "Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client", "def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") #", "- experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify -", "client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 -", "GOV.UK pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK", "Changes View -------------------------- # Change history - pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0():", "3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK", "in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct", "1 receives confirmation 
@app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client", "return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def", "v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants", "def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with GOV.UK Verify -", "next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0():", "@app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute Deed -----------------------------------------", "2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 -", "'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e):", "with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html',", 
"render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- # Step 2 -", "return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done page @app.route('/transfer/done') def transfer_done(): return", "json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary", "card payment --------------------- # pay to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return", "return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions():", "flow Step 3 - experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\")", "# --------------------------------------------------------------------------- # Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html')", "def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return", "clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st client", "def 
transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1 signs transfer", "GOV.UK verify - Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def", "@app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with GOV.UK Verify", "Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html')", "confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, citizens sign", "- Client 2 visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step", "# Sprint 3, Execute Deed - reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def", "----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification", "next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") # ---------------------------------------------------------------------------", "def sprint_2_select_action(): return 
render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return", "json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create transfer", "# scenario: user wants to find out ... something about a property #", "sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1,", "starts on GOV.UK and flows into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html',", "experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify -", "data=data, role=\"citizen\") # Step 3a - external process step - show user sms", "about a property # starts on GOV.UK and flows into register view #", "render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer, Step", "- Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # ---------------------------------------------------------------------------", "find_owner_d_details_2_0(): return 
render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify - Step 1 @app.route('/find-owner/d/verify')", "in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub flow", "- Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 -", "summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary.html', editable=True,", "# Page prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') #", "@app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\")", "login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2", "GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def", "pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages,", "- 2nd conveyancer, Step 2 - conveyancer-case-list 
@app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data", "\"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new provisions", "into register view # Verify + Payment + real fake title @app.route('/find-owner/d/search') def", "# Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') #", "render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow Step 3 - experian sign in", "def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi", "render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff ----------------- # Step 1 -", "- Client can now view the register if they want to. 
@app.route('/relationship-starts/client-view-register') def", "return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer", "login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd", "@app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results')", "LAST OF THE ALPHA PROTOTYPES! # A \"citizen facing\" register concept # #", "--------------------- # pay to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\")", "mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details", "def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1 signs transfer", "# scenario: user wants to find out who owns a property (IDA +", "with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 -", "relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return 
render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub flow", "@app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and select correct", "option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html',", "@app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants to find", "return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer that has been withdrawn", "transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed", "2nd conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data", "enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1", "1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 -", "enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def 
transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client", "@app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\")", "return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can now view the register if", "return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0():", "# --------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer, Step 1 - login page", "payment --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return", "process step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\")", "relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register')", "return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') 
@app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing')", "json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done page @app.route('/transfer/done') def transfer_done():", "Transaction flows, relationship starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html')", "property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated task", "- card payment --------------------- # --------------------------------------------------------------------------- # scenario: user wants to find out", "sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow -", "-------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def", "A \"citizen facing\" register concept # # If we're having to download a", "def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def", "govuk_property_details_2_1(): return 
render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants to find out who", "flows, relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') #", "--------------------------------------------------------------------------- # scenario: user wants to find out ... something about a property", "return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian", "--------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html')", "concept # # If we're having to download a \"legal copy\" then this", "--------------------------------------------------------------------------- # scenario: user wants to find out who owns a property (IDA", "# --------------------------------------------------------------------------- # Alternate Register view. 
V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def", "----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0 -----------------", "render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can now view the register if they", "- semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows,", "conveyancer, Step 4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html')", "to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False,", "Flask(__name__) app.debug = True # govuk_template asset path @app.context_processor def asset_path_context_processor(): return {", "render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html',", "return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete')", 
"render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') # ---------------------------------------------------------------------------", "next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES! # A \"citizen facing\"", "flow Step 1 - for conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return", "# --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start')", "return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login')", "that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return", "# GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html',", "json_data=open('app/static/data/transfer-signing-data.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a", "# GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in')", "4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step", "v3 ----------------- # Step 1a - external process 
step - show user email", "sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html')", "4 - experian 2nd phase sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') #", "starts on GOV.UK and flows into register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html',", "data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer, Step 1 -", "return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship", "Sub flow - GOV.UK Verification --------------------- # Sub flow - card payment ---------------------", "def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def", "- Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") #", "- Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return 
render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step", "return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return", "next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK Verification --------------------- # GOV.UK pages, property", "# Transfer prototypes, login page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer", "next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a - external process step - show user", "experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end", "payment --------------------- # --------------------------------------------------------------------------- # scenario: user wants to find out who owns", "@app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1 receives confirmation", "Verify --------------------- # card payment --------------------- # pay to view register ----------------- @app.route('/find-owner/d/card-payment')", "def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # ---------------------------------------------------------------------------", "render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2 visits start page @app.route('/relationship-starts/client-2-start') def 
client_2_start_2_2():", "--------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view. V4 with help on show @app.route('/register-view/register-view-4-help-text')", "register_hybrid(): return render_template('register-view/register-hybrid.html') # --------------------------------------------------------------------------- # Page prototypes, Register Changes View -------------------------- #", "- show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step", "- Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # ---------------------------------------------------------------------------", "@app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers ----------------------------------------- @app.route('/examples/example-1')", "GOV.UK pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK", "# GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") #", "return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return", "backpage='/casework/cases') # 
--------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1():", "return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') #", "# Alternate Register view. V4 with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return", "return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1():", "register concept # # If we're having to download a \"legal copy\" then", "- Step 3 - experian sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\")", "payment) # starts on GOV.UK and flows into register view @app.route('/find-owner/b/search') def find_owner_b_search():", "Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html')", "Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return", "login with GOV.UK Verify - use sub flow... 
# Sub flow - GOV.UK", "render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states():", "@app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub flow Step 2", "def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return", "Step 6 - Client 2 visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html')", "ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that", "render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK Verification --------------------- # GOV.UK pages,", "@app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2 visits start", "find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 -", "# Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # 
---------------------------------------------------------------------------", "govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1():", "def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return", "--------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html',", "return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify", "to find out who owns a property rouute c - (IDA) (real fake", "page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return", "transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # ---------------------------------------------------------------------------", "== '__main__': # Bind to PORT if defined, otherwise default to 5000. 
port", "data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/'", "# --------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES! # A \"citizen facing\" register", "pages, property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages,", "render_template('examples/example-page.html') if __name__ == '__main__': # Bind to PORT if defined, otherwise default", "prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data)", "If we're having to download a \"legal copy\" then this page can be", "next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html')", "no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return", "Page prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page", "ALPHA PROTOTYPES! 
# A \"citizen facing\" register concept # # If we're having", "return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') #", "find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def", "@app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK", "flow - GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1", "prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data =", "from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return", "flows, relationship starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') #", "details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered", "- buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): 
return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return", "def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def", "# starts on GOV.UK and flows into register view # Verify + Payment", "return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return", "render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return", "phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify ---------------------", "json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def", "@app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def", "render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def", "render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0():", "render_template('user-find-owner/govuk-verify/verify-sign-in.html', 
next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 - experian 2nd phase sign in", "render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html')", "sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html')", "render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def", "register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login page @app.route('/transfer/login') def", "pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK", "Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes,", "- Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', 
next_page=\"/find-owner/b/who-verified-you\") # GOV.UK", "render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def", "experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub", "5 - Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') #", "buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html')", "Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK", "verify - Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2')", "Step 5 - set the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html')", "# Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') def 
transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') #", "Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return", "@app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title():", "done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing the transfer", "return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register", "GOV.UK and flows into register view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") #", "render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json',", "@app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow Step", "# --------------------------------------------------------------------------- # Page prototypes, Register View -------------------------- 
@app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html')", "Transaction flows, relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html')", "return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start')", "next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client 1 signs transfer deed @app.route('/transfer-and-charge-v3/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_3_0():", "Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step", "return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def", "casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', \"r\") data = json.load(json_data) return render_template('casework/case-details.html', data=data,", "transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes,", "render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html')", "reserve_priority_3_confirmed(): return 
render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference')", "Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify", "4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") #", "'/static/' } @app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404", "def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0(): return", "----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- # scenario: user wants to", "return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed -", "more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- 
@app.route('/common/payment')", "4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5", "def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify", "transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True,", "- who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify -", "Client 2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # ---------------------------------------------------------------------------", "pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK", "fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing", "Step 1 - login with GOV.UK Verify - use sub flow... 
# Sub", "json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with no mortgage", "(all parties) confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows,", "render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a - external process step - show", "client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def", "Client 2 visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7", "this page can be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\")", "who owns a property # starts on GOV.UK and flows into register view", "in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK", "a property rouute c - (IDA) (real fake title) # starts on GOV.UK", "render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian sign", "Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): 
return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK", "register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing", "they want to. @app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client", "GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK", "3 - experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify", "to PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0',", "5 - Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') #", "relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 - experian", "user wants to find out who owns a property rouute c - (IDA)", "flow Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') #", "Step 2 - who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") #", "details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR): json_data=open('app/static/data/' + ABR + '.json', \"r\") data =", "= json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd conveyancer, Step 3", "def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 - experian 2nd", "@app.route('/register-view/register-3.0') def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid():", "render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has been withdrawn 
@app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json',", "@app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) #", "@app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") #", "----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view. 
V4", "def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return", "sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow", "withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data)", "experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end", "def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token", "conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify -", "Sub flow - GOV.UK Verification --------------------- # Step 2 - Client 1 enters", "--------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v3 ----------------- # Step", "verify - Step 4 - experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1():", "has been withdrawn @app.route('/transfer/transfer-withdrawn') def 
transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html',", "conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") #", "Step 4 - experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html',", "json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done", "property rouute c - (IDA) (real fake title) # starts on GOV.UK and", "to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages,", "next_page=\"/find-owner/register-view\") # end sub flow - card payment --------------------- # --------------------------------------------------------------------------- # scenario:", "--------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html')", "to find out who owns a property # starts on GOV.UK and flows", "something about a property # starts on GOV.UK and flows into register view", "view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment", "render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and select 
correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2():", "# Step 1a - external process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def", "- Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def", "been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False,", "- GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def", "render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd conveyancer, Step 3 - confirm page", "- GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def", "conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return", "@app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3,", "render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data", "next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login page 
@app.route('/transfer/login') def transfer_login(): return render_template('common/login.html',", "@app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1 2", "to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has", "register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # --------------------------------------------------------------------------- # ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html',", "who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify -", "@app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can now view", "= json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return", "find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 -", "- for conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") #", "GOV.UK pages, pay to view 
register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\")", "def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint", "render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return", "--------------------- # --------------------------------------------------------------------------- # scenario: user wants to find out who owns a", "in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification", "relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step", "-------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def", "# --------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def", "Verification --------------------- # Step 8 - Client 2 
enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0():", "verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1():", "find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view. V4 with sections fully", "data = json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with", "# end Sub flow - GOV.UK Verification --------------------- # Sub flow - card", "Verification --------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify():", "return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants to find out ... 
something", "experian sign in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub", "4 - experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\")", "return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases')", "- add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 -", "@app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token():", "client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd client", "render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike - Execute Deed", "def find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow Step 3", "2 - find correct 
property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3", "fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- #", "Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step", "register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") # end card payment ---------------------", "return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth')", "verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow", "return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing a \"token\" -----------------------------------------", "card payment --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view():", "Transaction flows, citizens sign transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def 
transfer_and_charge_citizen_1_start_2_0(): return", "verify - Sub flow Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2')", "rouute c - (IDA) (real fake title) # starts on GOV.UK and flows", "verify - Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1():", "in @app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow Step", "view @app.route('/find-owner/search') def find_owner_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing -----------------", "# Change history - pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return render_template('changes-view/changes-1.0.html') #", "relationship starts, client(s) confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step", "# GOV.UK pages, IDA/Credit Card/login stuff ----------------- # Step 1 - login with", "next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html',", "verify - Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2')", "flow Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\")", "register view # Verify + Payment + real fake title 
@app.route('/find-owner/d/search') def find_owner_d_search():", "experian sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step", "page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) #", "conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return", "2, prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action')", "@app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example mortgage agreement", "# Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return", "editable=True, data=data) # Transfer prototypes, done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') #", "render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') 
@app.route('/sprint-3/two-factor') def", "find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you')", "GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def", "render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day')", "def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow Step 2", "Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return", "@app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register", "2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow", "- Sub flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def", "data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes - 2nd", "render_template('index.html') 
@app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto')", "json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html',", "property details v2.0 ----------------- @app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property", "# Transfer prototypes - 2nd conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list():", "@app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK Verification", "- add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 -", "def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 -", "1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action():", "def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK 
Verification --------------------- #", "def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer", "json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction", "verify - Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1():", "GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client", "Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return", "GOV.UK Verification --------------------- # Sub flow - card payment --------------------- # GOV.UK pages,", "Transfer prototypes, transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data", "render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer prototypes, done page @app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html')", "render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step", "pages, property details v2.1 ----------------- 
@app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html') # --------------------------------------------------------------------------- #", "return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed')", "- Sub flow Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return", "render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option') def", "def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and", "pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify", "next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\")", "with option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return", "external process step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html',", "# 
--------------------------------------------------------------------------- # scenario: user wants to find out who owns a property", "page can be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") #", "def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') def", "- who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify -", "+ payment) # starts on GOV.UK and flows into register view @app.route('/find-owner/b/search') def", "- experian sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify -", "transfer and charge v3 ----------------- # Step 1a - external process step -", "----------------- @app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0", "Step 4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') #", "def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype", "def sprint_2_choose_method(): return 
render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return", "scenario: user wants to find out who owns a property (IDA + payment)", "step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def", "render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return render_template('register-view/register-hybrid.html') # ---------------------------------------------------------------------------", "sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html')", "2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html',", "render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return", "6 - 
Client 2 visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') #", "GOV.UK Verification --------------------- # Step 8 - Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def", "in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card", "@app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def register_3_0():", "2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 -", "def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4", "fake title) # starts on GOV.UK and flows into register view @app.route('/find-owner/c/search') def", "return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and", "GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view():", "find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- 
@app.route('/find-owner/b/property-details-2.0') def", "render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view. V4 with help", "# GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in')", "return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2():", "# Step 3 - results and select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return", "return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen confirms v2.0 --------", "Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step", "asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home(): return render_template('index.html')", "render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 - experian 2nd phase", "return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def", "Reserve Priority (Freeze register) --------------------------------------- @app.route('/reserve-priority/select') def 
reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm():", "return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2, prototype", "return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed - reworked from sprint 2 -----------------------------------", "- Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK", "pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- #", "def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK Verification ---------------------", "render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK Verification --------------------- # Sub flow", "conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify", "relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find correct property 
@app.route('/relationship-starts/conveyancer-find-property') def", "editable=False, data=data) # Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json',", "render_template('changes-view/changes-1.0.html') # Change history - historical only - nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0():", "results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property", "@app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1 -", "# Step 4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html')", "example_1(): return render_template('examples/example-page.html') if __name__ == '__main__': # Bind to PORT if defined,", "----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view", "we're having to download a \"legal copy\" then this page can be much", "editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer, Step 1", "on GOV.UK and flows into register view # Verify + Payment + real", "render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/register-view\") # end Sub flow - GOV.UK Verification 
--------------------- # GOV.UK pages,", "citizens sign transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\")", "@app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer", "1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5", "View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0')", "render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') #", "View -------------------------- # Change history - pending and historical @app.route('/changes-view/changes-1.0') def changes_1_0(): return", "in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 -", "property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return 
render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- #", "scenario: user wants to find out who owns a property # starts on", "Step 4 - experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html')", "def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4", "@app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s)", "prototype 1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code')", "@app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") #", "render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html')", "# end card payment --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view')", "transfer_and_charge_citizen_1_sign_mortgage_2_0(): return 
render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer')", "def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # ---------------------------------------------------------------------------", "# --------------------------------------------------------------------------- # Transfer prototypes, login page @app.route('/transfer/login') def transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\")", "render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def", "def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return", "phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow", "Step 1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer", "listing ----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details", 
"return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 - experian 2nd phase sign", "next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 - experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2')", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') #", "render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card payment --------------------- # pay to", "Sprint 3, prototype 1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return", "flow - card payment --------------------- # --------------------------------------------------------------------------- # scenario: user wants to find", "step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") #", "def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def", "@app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants to find", "render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def 
sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def", "in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK", "prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data)", "def common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search / start", "def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen confirms", "@app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm():", "--------------------------------------- @app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def", "Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') 
def sprint_4_citizen_login(): return", "flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return", "Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data)", "GOV.UK pages, results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK", "def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants to find", "return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian", "Transfer prototypes - 2nd conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def transfer_2nd_conveyancer_case_list(): json_data=open('app/static/data/cases-seller.json',", "def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0')", "----------------- @app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0", "conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data)", "2nd phase 
sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/d/card-payment\") # end Verify", "# govuk_template asset path @app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/'", "next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return", "Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data =", "withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data)", "conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html')", "# GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\")", "return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed')", "next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 - experian sign in @app.route('/find-owner/d/experian-sign-in') 
def find_owner_d_verify_experian_sign_in_1():", "relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow", "return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn():", "... something about a property # starts on GOV.UK and flows into register", "can be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html', next_page=\"404\") # ---------------------------------------------------------------------------", "render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # ---------------------------------------------------------------------------", "pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub", "----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # scenario: user wants to", "return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller')", 
"render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start')", "Register Changes View -------------------------- # Change history - pending and historical @app.route('/changes-view/changes-1.0') def", "conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 -", "render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register(): return render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint", "data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new provisions page", "render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0():", "@app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- #", "with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # 
--------------------------------------------------------------------------- #", "def client_confirm_2_2(): return render_template('relationship-starts/client-confirm-2.2.html') # Step 4 - Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed')", "verify - Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2')", "sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') # Sprint 3, prototype 1, conveyancer - buyer relationship --------------------------", "Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html')", "find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 -", "return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 - Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def", "sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for", "reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login():", "v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # 
--------------------------------------------------------------------------- # Page prototypes, Register", "find out ... something about a property # starts on GOV.UK and flows", "designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__ == '__main__': # Bind", "# Sub flow - card payment --------------------- # GOV.UK pages, accept cost to", "Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- #", "--------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2():", "--------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html',", "spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete():", "def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s) confirm", "render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/d/who-verified-you\") # verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return 
render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\")", "404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def", "# Verify --------------------- # verify - Step 1 @app.route('/find-owner/d/verify') def find_owner_d_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html',", "- Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step", "# verify - Step 4 - experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def", "return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer initiates v2.2 --------", "2nd conveyancer, Step 4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def transfer_2nd_conveyancer_marked_ready(): return", "import Environment app = Flask(__name__) app.debug = True # govuk_template asset path @app.context_processor", "render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2():", "# card payment --------------------- # pay to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment():", "Sub flow Step 3 - experian sign in @app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html',", "summary with option to withdraw 
@app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data)", "return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow Step 2 - who verified", "render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify - Sub flow Step 2 - who verified you", "@app.route('/find-owner/property-details-2.0') def find_owner_details_2_0(): return render_template('user-find-owner/property-details-2.0.html', next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff -----------------", "in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification", "----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def", "@app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK Verification", "Step 2 - find correct property @app.route('/relationship-starts/conveyancer-find-property') def conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step", "-------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') 
@app.route('/register-view/register-2.1') def register_2_1(): return render_template('register-view/register-2.1.html') @app.route('/register-view/register-3.0') def", "render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with GOV.UK Verify - use sub flow...", "property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow", "relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title')", "# pay to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/d/register-view\") #", "sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # ---------------------------------------------------------------------------", "log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 - find", "return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow Step 2 - who", "def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2 receives 
(all parties)", "experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end", "Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html')", "# end sub flow - card payment --------------------- # --------------------------------------------------------------------------- # scenario: user", "@app.route('/find-owner/experian-sign-in') def find_owner_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step", "4 - experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\")", "token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1", "Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client", "= json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with no", "- GOV.UK Verification --------------------- # Step 8 - Client 2 enters token 
@app.route('/relationship-starts/client-2-enter-token')", "# Transfer prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\")", "confirmation @app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts,", "- experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify", "return render_template('relationship-starts/register-2.1-no-pending.html') # Step 6 - Client 2 visits start page @app.route('/relationship-starts/client-2-start') def", "starts, conveyancer initiates v2.2 -------- @app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1", "\"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows,", "def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def", "----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def", "search / start v2.0 ----------------- 
@app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages,", "transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing():", "def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub flow Step 3 -", "render_template('sprint-3/buyer-conveyancer/buyer-3-register.html') # Sprint 3, Execute Deed - reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start')", "data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a - external", "return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0():", "Step 7 - login with GOV.UK Verify - use sub flow... 
# Sub", "and flows into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") # GOV.UK", "details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register", "def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return", "editable=False, data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v2.0", "return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK pages, pay to view register ----------------- @app.route('/find-owner/b/card-payment') def", "the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html',", "5 - set the number of clients @app.route('/relationship-starts/conveyancer-add-clients') def conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') #", "render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def govuk_property_details_2_1(): return render_template('govuk-views/property-details-2.1.html')", "def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 
5 - set the number of clients", "nothing pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example", "def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login with GOV.UK Verify -", "'__main__': # Bind to PORT if defined, otherwise default to 5000. port =", "def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home(): return", "mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1():", "# Step 1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") #", "prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def register_2_1(): return", "verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1():", "json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer prototypes,", "next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK Verification --------------------- # Sub flow -", "json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer 
prototypes, transfer that has been", "Page prototypes, Register Changes View -------------------------- # Change history - pending and historical", "Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify", "return render_template('user-find-owner/search.html', next_page=\"/find-owner/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/results') def find_owner_results(): return", "sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers ----------------------------------------- @app.route('/examples/example-1') def example_1():", "# Sprint 2, spike - Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html')", "for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify", "experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub", "# Sprint 2, prototype 1: Passing a \"token\" ----------------------------------------- @app.route('/sprint-2/token') def sprint_2_token(): return", "Transfer prototypes - 2nd conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer():", "conveyancer_add_clients_2_2(): return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st client 
@app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2():", "render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # ---------------------------------------------------------------------------", "@app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def", "# end Sub flow - GOV.UK Verification --------------------- # Step 2 - Client", "4 - experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\")", "+ ABR + '.json', \"r\") data = json.load(json_data) return render_template('casework/case-details.html', data=data, backpage='/casework/cases') #", "Alternate Register view. V4 with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html',", "end Sub flow - GOV.UK Verification --------------------- # Step 8 - Client 2", "def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\") # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Alternate Register view.", "GOV.UK Verify - use sub flow... 
# Sub flow - GOV.UK Verification ---------------------", "@app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification ---------------------", "@app.route('/relationship-starts/client-2-experian-sign-in') def relationship_starts_client_2_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4", "end Sub flow - GOV.UK Verification --------------------- # Step 2 - Client 1", "def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub flow Step 2", "# Transaction flows, relationship starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return", "common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK pages, search / start v2.0", "Sprint 3, prototype 1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return", "Example pages - for designers ----------------------------------------- @app.route('/examples/example-1') def example_1(): return render_template('examples/example-page.html') if __name__", "semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship", "def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # 
--------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES!", "def hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def casework_case_list(): json_data=open('app/static/data/casework-list.json',", "pages, property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- #", "transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False,", "return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian", "Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step", "def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def", "Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes,", "# ----------------- @app.route('/common/payment') def common_payment(): return render_template('common/payment.html', next_page=\"/\") # --------------------------------------------------------------------------- # GOV.UK 
pages,", "@app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register view --------------------------", "4 - Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html', next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\")", "transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False,", "render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def", "Sub flow Step 2 - who verified you @app.route('/relationship-starts/client-who-verified-you') def relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html')", "sub flow - card payment --------------------- # --------------------------------------------------------------------------- # scenario: user wants to", "1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes", "return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def", "confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def 
citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes,", "prototypes - 2nd conveyancer, Step 4 - transfer ready to sign @app.route('/transfer-2nd-con/marked-ready') def", "add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add", "next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify - Sub", "render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') # --------------------------------------------------------------------------- #casework prototype list @app.route('/casework/cases') def", "- 2nd conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return render_template('common/login.html',", "real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results", "download a \"legal copy\" then this page can be much more straightforward @app.route('/register-view/register-view-citizen-1')", "title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") # GOV.UK pages, results listing -----------------", "render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html')", "into register view @app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', 
next_page=\"/find-owner/c/results\") # GOV.UK pages, results", "4 - experian 2nd phase sign in @app.route('/relationship-starts/client-experian-sign-in-part-2') def relationship_starts_client_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') #", "transfer_login(): return render_template('common/login.html', next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json',", "@app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token():", "mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 - Client", "correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated", "def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers ----------------------------------------- @app.route('/examples/example-1') def", "data=data) # Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') #", "property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return 
render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages,", "@app.route('/find-owner/c/results') def find_owner_c_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0 -----------------", "return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a - external process step -", "@app.route('/find-owner/c/search') def find_owner_c_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/c/results\") # GOV.UK pages, results listing ----------------- @app.route('/find-owner/c/results')", "@app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") # Verify --------------------- # verify - Step", "# Sprint 3, prototype 1, buyer -> conveyancer relationship -------------------------- @app.route('/sprint-3/buyer-login') def sprint_3_buyer_login():", "return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token')", "@app.route('/sprint-2/token') def sprint_2_token(): return render_template('sprint-2/token/citizen-1-register.html') @app.route('/sprint-2/select-action') def sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method():", "Step 3a - external process step - show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def", "- Client 2 confirms 
@app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 -", "return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow - card payment --------------------- # ---------------------------------------------------------------------------", "# Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing-seller') def transfer_signing_seller(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data", "render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers -----------------------------------------", "find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 - experian sign in", "cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") # GOV.UK", "add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation", "flow Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\")", "payment --------------------- # pay to view register ----------------- @app.route('/find-owner/d/card-payment') def find_owner_d_card_payment(): return render_template('common/payment.html',", "'prototypes_asset_path': '/static/' } 
@app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'),", "@app.route('/govuk/property-details-2.0') def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1')", "return render_template('common/proto-404.html'), 404 @app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html')", "# Step 1 - login with GOV.UK Verify - use sub flow... #", "sprint_2_select_action(): return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html')", "Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3", "summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return", "relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/who-verified-you\") # GOV.UK verify - Sub", "v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow - GOV.UK", "3 - experian sign in 
@app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify", "GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view():", "next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2') def hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\")", "Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data)", "and select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 -", "sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') # --------------------------------------------------------------------------- # Sprint 2, prototype 1: Passing a \"token\"", "confirm v2.2 -------- @app.route('/relationship-starts/client-start') def client_start_2_2(): return render_template('relationship-starts/client-start-2.2.html') # Step 1 - login", "--------------------- # Step 8 - Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return", "pages, property details v2.0 ----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK", "for conveyancer create relationship flow 
@app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK verify", "mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_3_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"/transfer-and-charge-v3/citizen-1-sign-transfer\") # Step 3 - Client", "# Sprint 3, prototype 1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start():", "@app.route('/transfer-2nd-con/review-transfer') def transfer_2nd_conveyancer_review_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\")", "show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return", "def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def", "# GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/find-owner/b/experian-sign-in')", "conveyancer_find_property_2_2(): return render_template('relationship-starts/conveyancer-find-property-2.2.html') # Step 3 - results and select correct property @app.route('/relationship-starts/conveyancer-select-property')", "Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return 
render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html')", "and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1", "def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html') # Step 9 - generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2():", "2 confirms @app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2", "return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- # Step 2", "verify - Sub flow Step 1 - for conveyancer create relationship flow @app.route('/find-owner/verify')", "Sub flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2():", "- GOV.UK Verification --------------------- # Step 2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token')", "--------------------- # GOV.UK pages, accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost():", "next_page=\"/transfer/conveyancer-case-list\") # Transfer prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data =", "sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', 
next_page=\"citizen-1-2-factor-auth\") # Step 4 - Client", "def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return", "1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction", "enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client", "@app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) #", "GOV.UK and flows into register view @app.route('/find-owner/b/search') def find_owner_b_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/b/results\") #", "def register_3_0(): return render_template('register-view/register-3.0.html') @app.route('/register-view/register-test-title') def register_test_title(): return render_template('register-view/register-test-title.html') @app.route('/register-view/register-hybrid') def register_hybrid(): return", "a \"legal copy\" then this page can be much more straightforward @app.route('/register-view/register-view-citizen-1') def", "GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return 
render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\")", "sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint 2, spike -", "conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn():", "Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/b/experian-sign-in\") # GOV.UK verify - Sub", "prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- #", "render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def", "return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify", "def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages -", "relationship starts, citizen confirms v2.0 -------- 
@app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # ---------------------------------------------------------------------------", "flow Step 2 - who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/experian-sign-in\")", "sign in @app.route('/relationship-starts/client-2-experian-sign-in-part-2') def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK", "results listing ----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property", "@app.route('/404') def edge_of_proto(e): return render_template('common/proto-404.html') @app.route('/proto') def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday():", "@app.route('/find-owner/d/results') def find_owner_d_results(): return render_template('user-find-owner/results-c.html', next_page=\"/find-owner/d/property-details-2.0\") # GOV.UK pages, property details v2.0 -----------------", "def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi", "casework_case_list(): json_data=open('app/static/data/casework-list.json', \"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>')", "render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify - Step 3 - experian sign in 
@app.route('/find-owner/d/experian-sign-in') def", "def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer(): return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return", "transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer", "@app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters", "return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge-v3/citizen-1-sign-mortgage')", "- log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html', next_page=\"/relationship-starts/conveyancer-find-property\") # Step 2 -", "starts, citizen confirms v2.0 -------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- #", "next_page=\"/find-owner/verify\") # GOV.UK pages, IDA/Credit Card/login stuff ----------------- # Step 1 - login", "pay to view register ----------------- @app.route('/find-owner/b/card-payment') def find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end", "Verification --------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/c/verify') def 
find_owner_c_verify():", "find out who owns a property # starts on GOV.UK and flows into", "verify - Step 2 @app.route('/find-owner/d/who-verified-you') def find_owner_d_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/d/experian-sign-in\") # verify -", "--------------------- # card payment --------------------- # pay to view register ----------------- @app.route('/find-owner/d/card-payment') def", "signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0(): return render_template('transfer-and-charge/citizen-1-sign-mortgage-2.0.html', next_page=\"citizen-1-sign-transfer\") # Step 4 -", "confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can now", "def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer prototypes, mortgage details page @app.route('/transfer/mortgage-details') def transfer_mortgage_details():", "process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start')", "return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return render_template('sprint-2/deed/buyer-2-execution-complete.html') # Example pages - for designers", "def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def 
sprint_3_register_v1a_history_1(): return", "@app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed(): return render_template('sprint-3/deed/buyer-1-sign-charge.html') @app.route('/sprint-3/display-transfer-for-signing') def sprint_3_execute_transfer():", "asset path @app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/')", "wants to find out who owns a property rouute c - (IDA) (real", "render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4 - experian 2nd", "- external process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html',", "\"r\") data = json.load(json_data) return render_template('casework/case-list.html', data=data) #casework details page @app.route('/casework/cases/<ABR>') def casework_case_details(ABR):", "find_owner_c_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 -", "render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def sprint_2_input_token(): return render_template('sprint-2/token/conveyancer-1-input-token.html') @app.route('/sprint-2/retrieve-token') def sprint_2_retrieve_token(): return render_template('sprint-2/token/conveyancer-2-retrieve-details.html') # Sprint", "json.load(json_data) return render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes - 2nd conveyancer, Step", "enters token 
@app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step 9 - Client 2", "charge v3 ----------------- # Step 1a - external process step - show user", "next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return render_template('user-find-owner/property-details-2.1.html',", "render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, summary with no mortgage details page", "@app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete(): return render_template('sprint-4/relationship/citizen-complete.html') @app.route('/sprint-4/citizen-register') def sprint_4_citizen_register(): return render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- #", "--------------------- # GOV.UK verify - Sub flow Step 1 - for conveyancer create", "v2.0 ----------------- @app.route('/govuk/results-2.0') def govuk_results_2_0(): return render_template('govuk-views/results-2.0.html') # GOV.UK pages, property details v2.0", "render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship verifier", "now view the register if they want to. 
@app.route('/relationship-starts/client-view-register') def client_view_register_2_1(): return render_template('relationship-starts/register-2.1-no-pending.html')", "\"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary with", "view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1')", "# --------------------------------------------------------------------------- # GOV.UK pages, search / start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0():", "- GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/c/verify')", "flow Step 1 - for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return", "render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login page @app.route('/transfer/login') def transfer_login(): return", "@app.route('/transfer/done') def transfer_done(): return render_template('transfer/done.html') # Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing')", "Bind to PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT', 5000))", "# Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_2_0(): return", "--------------------------------------------------------------------------- # Page prototypes, Example mortgage agreement -------------------------- @app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html')", "def relationship_starts_client_2_verify_experian_sign_in_2nd_part_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- #", "prototypes, conveyancer-case-list page @app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html',", "return render_template('sprint-2/token/citizen-2-select-action.html') @app.route('/sprint-2/choose-method') def sprint_2_choose_method(): return render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change')", "citizens sign transfer and charge v3 ----------------- # Step 1a - external process", "prototypes, transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data =", "= json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) # Transfer prototypes, transfer that has", "@app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed(): return 
render_template('reserve-priority/protect-confirmed-2.0.html') # --------------------------------------------------------------------------- # Sprint 4, Relationship verifier flow", "Environment app = Flask(__name__) app.debug = True # govuk_template asset path @app.context_processor def", "# Transfer prototypes - 2nd conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login') def", "----------------- @app.route('/find-owner/register-view') def find_owner_register_view(): return render_template('user-find-owner/register-3.0.html', next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0", "Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1():", "@app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) #", "Execute Deed - reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html')", "to find out who owns a property (IDA + payment) # starts on", "out who owns a property (IDA + payment) # starts on GOV.UK and", "# Step 2 - Client 1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html')", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view') def find_owner_c_register_view(): return render_template('register-view/register-test-title.html') # ---------------------------------------------------------------------------", "return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set the number of clients 
@app.route('/relationship-starts/conveyancer-add-clients') def", "- for conveyancer create relationship flow @app.route('/relationship-starts/client-2-login') def client_2_verify_2_0(): return render_template('relationship-starts/verify-subflow-client-2/verify-intro.html') # GOV.UK", "client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client 1 confirms @app.route('/relationship-starts/client-confirm') def client_confirm_2_2():", "----------------- @app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK pages, property details v2.0", "GOV.UK verify - Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in') def", "return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return", "- Client 1 receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5", "Verification --------------------- # Sub flow - card payment --------------------- # GOV.UK pages, accept", "def govuk_property_details_2_0(): return render_template('govuk-views/property-details-2.0.html') # GOV.UK pages, property details v2.1 ----------------- @app.route('/govuk/property-details-2.1') def", "@app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login page", "sign transfer and charge v3 ----------------- # Step 1a - external process step", "return 
render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference')", "conveyancer create relationship flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify -", "show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # --------------------------------------------------------------------------- # Transfer prototypes, login", "Sub flow Step 2 - who verified you @app.route('/find-owner/who-verified-you') def find_owner_verify_who(): return render_template('user-find-owner/govuk-verify/verify-who.html',", "associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2(): return render_template('relationship-starts/conveyancer-select-task-2.2.html') # Step 5 - set the", "render_template('casework/case-details.html', data=data, backpage='/casework/cases') # --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1')", "user wants to find out who owns a property (IDA + payment) #", "Sub flow Step 3 - experian sign in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return 
render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html')", "sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html')", "return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def", "Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data =", "data=data) # Transfer prototypes, create transfer page @app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data", "return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log in @app.route('/relationship-starts/login') def relationship_starts_login_2_2(): return render_template('common/login.html',", "relationship_starts_client_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow Step 3 - experian", "pending @app.route('/changes-view/changes-no-pending-1.0') def changes_no_pending_1_0(): return render_template('changes-view/changes-no-pending-1.0.html') # --------------------------------------------------------------------------- # Page prototypes, Example mortgage", "return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2 receives (all parties) confirmation @app.route('/relationship-starts/clients-confirmed')", "# GOV.UK pages, accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return", "Step 
1 - for conveyancer create relationship flow @app.route('/find-owner/verify') def find_owner_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html',", "find_owner_b_card_payment(): return render_template('common/payment.html', next_page=\"/find-owner/register-view\") # end sub flow - card payment --------------------- #", "@app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow Step 2", "V4 with help on show @app.route('/register-view/register-view-4-help-text') def register_view_4_0_help_text(): return render_template('register-view/register-test-title-help.html', next_page=\"404\") # ---------------------------------------------------------------------------", "start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing", "- use sub flow... 
# Sub flow - GOV.UK Verification --------------------- # GOV.UK", "next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you') def find_owner_c_verify_who(): return", "json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) # Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def", "- reworked from sprint 2 ----------------------------------- @app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def", "return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def", "prototypes - 2nd conveyancer, Step 1 - login page @app.route('/transfer-2nd-con/login') def transfer_2nd_conveyancer_login(): return", "who verified you @app.route('/relationship-starts/client-2-who-verified-you') def relationship_starts_client_2_verify_who_1(): return render_template('relationship-starts/verify-subflow-client-2/verify-who.html') # GOV.UK verify - Sub", "6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7", "1, conveyancer - buyer relationship -------------------------- @app.route('/sprint-3/conveyancer-start') def sprint_3_conveyancer_start(): return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def", "- Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def", "def proto(): return render_template('index2.html') @app.route('/hack-day') def hackday(): return render_template('index-hack.html') # 
--------------------------------------------------------------------------- #casework prototype", "experian sign in @app.route('/find-owner/b/experian-sign-in') def find_owner_b_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/b/experian-sign-in-part-2\") # GOV.UK verify -", "- GOV.UK Verification --------------------- # GOV.UK verify - Sub flow Step 1 -", "def transfer_2nd_conveyancer_login(): return render_template('common/login.html', next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer, Step 2", "return render_template('sprint-3/buyer-conveyancer/conveyancer-0-start.html') @app.route('/sprint-3/conveyancer-login') def sprint_3_conveyancer_login(): return render_template('sprint-3/buyer-conveyancer/conveyancer-1-login.html') @app.route('/sprint-3/conveyancer-enter-title') def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers')", "- Sub flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK", "transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login')", "show user sms message @app.route('/transfer-and-charge-v3/citizen-1-sms') def transfer_and_charge_citizen_1_sms_3_0(): return render_template('transfer-and-charge/citizen-1-sms-2.0.html', next_page=\"citizen-1-2-factor-auth\") # Step 4", "Transfer prototypes, transfer that has been withdrawn @app.route('/transfer/transfer-withdrawn') def transfer_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data", 
"@app.route('/sprint-3/buyer-login') def sprint_3_buyer_login(): return render_template('sprint-3/buyer-conveyancer/buyer-1-login.html') @app.route('/sprint-3/buyer-ref-code') def sprint_3_buyer_ref_code(): return render_template('sprint-3/buyer-conveyancer/buyer-2-reference-code.html') @app.route('/sprint-3/buyer-register') def sprint_3_buyer_register():", "----------------- # Step 1a - external process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email')", "GOV.UK verify - Sub flow Step 4 - experian 2nd phase sign in", "Step 4 - Client 1 2 factor authentication @app.route('/transfer-and-charge-v3/citizen-1-2-factor-auth') def transfer_and_charge_citizen_1_2_factor_auth(): return render_template('transfer-and-charge/citizen-1-2-factor.html',", "render_template('sprint-2/token/citizen-3-choose-method.html') @app.route('/sprint-2/generate-token') def sprint_2_generate_token(): return render_template('sprint-2/token/citizen-4-generate-token.html') @app.route('/sprint-2/show-change') def sprint_2_show_change(): return render_template('sprint-2/token/citizen-5-register-during-change.html') @app.route('/sprint-2/input-token') def", "def sprint_3_conveyancer_enter_title(): return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return", "2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") #", "@app.route('/find-owner/b/results') def find_owner_b_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/b/property-details-2.0\") # GOV.UK 
pages, property details v2.0 -----------------", "render_template('relationship-starts/verify-subflow-client-2/verify-sign-in-2.html') # end Sub flow - GOV.UK Verification --------------------- # Step 8 -", "json.load(json_data) return render_template('transfer/transfer-signing.html', editable=False, data=data, role=\"buyer\") # Transfer prototypes, signing the transfer page", "return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1(): return render_template('sprint-3/register-view/register-v1a-history-1.html') #", "wants to find out who owns a property (IDA + payment) # starts", "return render_template('user-find-owner/govuk-verify/verify-who.html', next_page=\"/find-owner/c/experian-sign-in\") # GOV.UK verify - Sub flow Step 3 - experian", "- Execute Deed ----------------------------------------- @app.route('/sprint-2/execute-deed') def sprint_2_execute_deed(): return render_template('sprint-2/deed/buyer-1-execute-deed.html') @app.route('/sprint-2/execution-complete') def sprint_2_execution_complete(): return", "conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, client(s) confirm v2.2", "return render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return", "\"r\") data = json.load(json_data) return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer", "citizen_confirms_2_0(): 
return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def", "relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow Step 4 - experian", "8 - Client 2 enters token @app.route('/relationship-starts/client-2-enter-token') def client_2_enter_token_2_0(): return render_template('relationship-starts/client-2-enter-token-2.0.html') # Step", "/ start v2.0 ----------------- @app.route('/govuk/search-2.0') def govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results", "role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer, Step 1 - login", "# Transfer prototypes, new provisions page @app.route('/transfer/new-provisions') def transfer_new_provisions(): return render_template('transfer/new-provisions.html') # Transfer", "data=data) # Transfer prototypes, summary with empty states @app.route('/transfer/transfer-empty-states') def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\")", "1 - login with GOV.UK Verify @app.route('/transfer-and-charge/citizen-1-login') def transfer_and_charge_citizen_1_login_2_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") #", "Sub flow Step 3 - experian sign in @app.route('/find-owner/c/experian-sign-in') def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html',", "transfer and charge v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return 
render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step", "next_page=\"/find-owner/d/card-payment\") # end Verify --------------------- # card payment --------------------- # pay to view", "render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow Step 2 - who verified you", "Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/experian-sign-in-part-2') def find_owner_verify_experian_sign_in_2nd_part_1():", "next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage') def transfer_and_charge_citizen_1_sign_mortgage_2_0():", "7 - add 2nd client @app.route('/relationship-starts/conveyancer-add-client-2') def conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8", "- generated token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows,", "# Transfer prototypes - 2nd conveyancer, Step 3 - confirm page @app.route('/transfer-2nd-con/review-transfer') def", "@app.context_processor def asset_path_context_processor(): return { 'asset_path': '/static/govuk-template/', 'prototypes_asset_path': '/static/' } @app.route('/') def home():", "Sub flow Step 4 - experian 2nd phase sign in @app.route('/find-owner/b/experian-sign-in-part-2') def find_owner_b_verify_experian_sign_in_2nd_part_1():", "render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') @app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return 
render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint", "# Transfer prototypes, signing the transfer page @app.route('/transfer/transfer-signing') def transfer_signing(): json_data=open('app/static/data/ready-to-sign-transfer.json', \"r\") data", "-------- @app.route('/relationship-starts/citizen-confirms') def citizen_confirms_2_0(): return render_template('relationship-starts/citizen-confirms-2.0.html') # --------------------------------------------------------------------------- # Page prototypes, Register View", "1 enters token @app.route('/relationship-starts/client-enter-token') def client_enter_token_2_1(): return render_template('relationship-starts/client-enter-token-2.1.html') # Step 3 - Client", "def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-no-mortgage.html', editable=True, conveyancer=\"buyer\", data=data) #", "visits start page @app.route('/relationship-starts/client-2-start') def client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login", "render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data =", "return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/b/card-payment\") # end Sub flow - GOV.UK Verification --------------------- # Sub", "next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_2_0(): return", "client_2_start_2_2(): return render_template('relationship-starts/client-2-start-2.2.html') # Step 7 - login with GOV.UK Verify - use", "@app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): 
return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') # Sprint 3, prototype 1, buyer -> conveyancer", "return render_template('examples/example-page.html') if __name__ == '__main__': # Bind to PORT if defined, otherwise", "then this page can be much more straightforward @app.route('/register-view/register-view-citizen-1') def register_view_citizen_1(): return render_template('register-view/register-view-citizen-1.html',", "next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification --------------------- # GOV.UK verify - Sub", "GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/d/property-details-2.0') def find_owner_d_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/d/verify\") #", "receives confirmation @app.route('/relationship-starts/client-semi-confirmed') def client_semi_confirmed_2_2(): return render_template('relationship-starts/client-semi-confirmed-2.2.html') # Step 5 - Client can", "conveyancer_add_client_2_2_2(): return render_template('relationship-starts/conveyancer-add-client-2-2.2.html') # Step 8 - confirmation @app.route('/relationship-starts/conveyancer-confirm') def conveyancer_confirm_2_2(): return render_template('relationship-starts/conveyancer-confirm-2.2.html')", "next_page=\"/find-owner/c/property-details-2.0\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/property-details-2.0') def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html',", "data=data) # --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v2.0 -----------------", "verifier flow -------------------------- @app.route('/sprint-4/citizen-reference') def sprint_4_reference(): return render_template('sprint-4/relationship/citizen-reference.html') @app.route('/sprint-4/citizen-login') def 
sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html')", "find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK Verification --------------------- #", "next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0(): return", "- Client 1 signs transfer @app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step", "return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return", "render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\") # Step 2 - Client 1 enters token @app.route('/transfer-and-charge-v3/citizen-1-enter-token') def transfer_and_charge_citizen_1_enter_token_3_0():", "conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html') # Step 4 - select associated task @app.route('/relationship-starts/conveyancer-select-task') def conveyancer_select_task_2_2():", "return render_template('sprint-3/deed/buyer-1a-sign-transfer.html') @app.route('/sprint-3/two-factor') def sprint_3_two_factor(): return render_template('sprint-3/deed/buyer-2-two-factor.html') @app.route('/sprint-3/signing-complete') def sprint_3_signing_complete(): return render_template('sprint-3/deed/buyer-3-signing-complete.html') #", "govuk_search_2_0(): return render_template('govuk-views/search-2.0.html') # GOV.UK pages, results listing v2.0 ----------------- 
@app.route('/govuk/results-2.0') def govuk_results_2_0():", "@app.route('/relationship-starts/clients-confirmed') def clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen", "# starts on GOV.UK and flows into register view @app.route('/find-owner/search') def find_owner_search(): return", "# --------------------------------------------------------------------------- # Transaction flows, citizens sign transfer and charge v3 ----------------- #", "render_template('transfer-2nd-conveyancer/review-transfer.html', editable=False, data=data, role=\"seller\") # Transfer prototypes - 2nd conveyancer, Step 4 -", "@app.route('/transfer/create-transfer') def create_transfer(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/create-transfer.html', editable=True, data=data) #", "return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\") # Step 3 - Client 1 signs mortgage deed @app.route('/transfer-and-charge/citizen-1-sign-mortgage')", "@app.route('/transfer/conveyancer-case-list') def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer", "has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def transfer_2nd_con_withdrawn(): json_data=open('app/static/data/withdrawn-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html',", "@app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/d/experian-sign-in-part-2\") # verify - Step 4 - experian", 
"conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes, create", "flow @app.route('/relationship-starts/client-login') def client_verify_2_2(): return render_template('relationship-starts/verify-subflow-client-1/verify-intro.html') # GOV.UK verify - Sub flow Step", "def find_owner_c_details_2_0(): return render_template('user-find-owner/property-details-2.1-c.html', next_page=\"/find-owner/c/verify\") # Sub flow - GOV.UK Verification --------------------- #", "# GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/b/verify') def find_owner_b_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html',", "Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return", "data = json.load(json_data) return render_template('transfer-2nd-conveyancer/conveyancer-case-list.html', data=data) # Transfer prototypes - 2nd conveyancer, Step", "clients_confirmed_2_2(): return render_template('relationship-starts/clients-confirmed-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, citizen confirms v2.0", "next_page=\"/find-owner/changes-view\") # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html',", "scenario: user wants to find out ... 
something about a property # starts", "@app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return render_template('sprint-3/register-view/register-v1.html') @app.route('/sprint-3/register-v1a-history') def sprint_3_register_v1a_history(): return render_template('sprint-3/register-view/register-v1a-history.html') @app.route('/sprint-3/register-v1a-history-1') def sprint_3_register_v1a_history_1():", "accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def find_owner_b_accept_cost(): return render_template('user-find-owner/accept-cost.html', next_page=\"/find-owner/b/card-payment\") #", "next_page=\"/transfer-and-charge-v3/citizen-1-semi-confirmed\") # Step 5 - Client 1 - semi confirmation @app.route('/transfer-and-charge-v3/citizen-1-semi-confirmed') def transfer_and_charge_citizen_1_semi_confirmed_3_0():", "@app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return render_template('hackday/land-record-1.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-2')", "def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary.html', editable=True, conveyancer=\"buyer\", data=data) #", "def find_owner_c_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html', next_page=\"/find-owner/c/experian-sign-in-part-2\") # GOV.UK verify - Sub flow Step 4", "next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def find_owner_b_verify_who(): return", "@app.route('/changes-view/changes-1.0') def changes_1_0(): return render_template('changes-view/changes-1.0.html') # Change history - historical only - nothing", "def 
transfer_and_charge_citizen_1_semi_confirmed_3_0(): return render_template('transfer-and-charge/citizen-1-semi-confirmed-2.0.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts, conveyancer initiates", "find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/c/who-verified-you')", "Register view. V4 with sections fully open @app.route('/register-view/register-view-4-expanded') def register_view_4_0_expanded(): return render_template('register-view/register-test-title-expanded.html', next_page=\"404\")", "Transfer prototypes, summary with option to withdraw @app.route('/transfer/summary-withdraw-option') def transfer_withdraw_option(): json_data=open('app/static/data/complete-transfer.json', \"r\") data", "def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login with GOV.UK Verify", "summary with no mortgage details page @app.route('/transfer/summary-no-mortgage') def transfer_summary_no_mortgage(): json_data=open('app/static/data/no-mortgage.json', \"r\") data =", "v2.0 ----------------- @app.route('/transfer-and-charge/citizen-1-start') def transfer_and_charge_citizen_1_start_2_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step 1 - login", "Transfer prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer", "next_page=\"/transfer-2nd-con/conveyancer-case-list\") # Transfer prototypes - 2nd conveyancer, Step 2 - conveyancer-case-list @app.route('/transfer-2nd-con/conveyancer-case-list') def", "----------------- 
@app.route('/find-owner/results') def find_owner_results(): return render_template('user-find-owner/results.html', next_page=\"/find-owner/property-details-2.0\") # GOV.UK pages, property details v2.0", "render_template('relationship-starts/verify-subflow-client-1/verify-who.html') # GOV.UK verify - Sub flow Step 3 - experian sign in", "- Step 4 - experian 2nd phase sign in @app.route('/find-owner/d/experian-sign-in-part-2') def find_owner_d_verify_experian_sign_in_2nd_part_1(): return", "render_template('sprint-4/relationship/citizen-register.html') # --------------------------------------------------------------------------- # Sprint 3, Register view -------------------------- @app.route('/sprint-3/register-v1') def sprint_3_register_v1(): return", "# --------------------------------------------------------------------------- #hackday @app.route('/hackday/land-ownership-record') def hackday_land_record(): return render_template('hackday/land-record.html', next_page=\"404\") @app.route('/hackday/land-ownership-record-1') def hackday_land_record_1(): return", "Verify + Payment + real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\")", "1a - external process step - show user email @app.route('/transfer-and-charge-v3/citizen-1-email') def transfer_and_charge_citizen_1_email_3_0(): return", "token @app.route('/relationship-starts/conveyancer-token') def conveyancer_token_2_2(): return render_template('relationship-starts/conveyancer-token-2.2.html') # --------------------------------------------------------------------------- # Transaction flows, relationship starts,", "return render_template('sprint-3/buyer-conveyancer/conveyancer-2-enter-title.html') @app.route('/sprint-3/conveyancer-add-buyers') def sprint_3_conveyancer_add_buyers(): return render_template('sprint-3/buyer-conveyancer/conveyancer-5-add-buyers.html') 
@app.route('/sprint-3/relationship-reference') def sprint_3_relationship_reference(): return render_template('sprint-3/buyer-conveyancer/conveyancer-6-ref-for-buyers.html') #", "def transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn')", "def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate Register view. V4 with sections", "page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary') def", "pages, property details v2.0 ----------------- @app.route('/find-owner/changes-view') def find_owner_historian_view(): return render_template('user-find-owner/changes-1.0.html', next_page=\"/\") # ---------------------------------------------------------------------------", "render_template('relationship-starts/conveyancer-add-clients-2.2.html') # Step 6 - add 1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html')", "Step 1 - login with GOV.UK Verify @app.route('/transfer-and-charge-v3/citizen-1-login') def transfer_and_charge_citizen_1_login_3_0(): return render_template('transfer-and-charge/citizen-1-login-2.0.html', next_page=\"citizen-1-enter-token\")", "return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/b/who-verified-you\") # GOV.UK verify - Sub flow Step 2 @app.route('/find-owner/b/who-verified-you') def", "details v2.0 ----------------- @app.route('/find-owner/b/property-details-2.0') def find_owner_b_details_2_0(): return 
render_template('user-find-owner/property-details-2.1.html', next_page=\"/find-owner/b/verify\") # Sub flow -", "page @app.route('/transfer/mortgage-details') def transfer_mortgage_details(): return render_template('transfer/mortgage-details.html') # Transfer prototypes, mortgage details entered page", "# Page prototypes, Register View -------------------------- @app.route('/register-view/register-2.0') def register_2_0(): return render_template('register-view/register-2.0.html') @app.route('/register-view/register-2.1') def", "1st client @app.route('/relationship-starts/conveyancer-add-client-1') def conveyancer_add_client_1_2_2(): return render_template('relationship-starts/conveyancer-add-client-1-2.2.html') # Step 7 - add 2nd", "@app.route('/reserve-priority/select') def reserve_priority_1_select(): return render_template('reserve-priority/protect-what-2.0.html') @app.route('/reserve-priority/confirm') def reserve_priority_2_confirm(): return render_template('reserve-priority/protect-confirm-2.0.html') @app.route('/reserve-priority/confirmed') def reserve_priority_3_confirmed():", "render_template from flask.ext.assets import Environment app = Flask(__name__) app.debug = True # govuk_template", "payment --------------------- # GOV.UK pages, accept cost to view register ----------------- @app.route('/find-owner/b/accept-cost') def", "def conveyancer_case_list(): json_data=open('app/static/data/cases.json', \"r\") data = json.load(json_data) return render_template('transfer/conveyancer-case-list.html', data=data) # Transfer prototypes,", "details entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page", "wants to find out ... 
something about a property # starts on GOV.UK", "data = json.load(json_data) return render_template('transfer/transfer-withdrawn.html', editable=True, data=data) # Transfer prototypes, summary with option", "def transfer_and_charge_citizen_1_email_3_0(): return render_template('transfer-and-charge/citizen-1-email-2.0.html', next_page=\"citizen-1-start\") @app.route('/transfer-and-charge-v3/citizen-1-start') def transfer_and_charge_citizen_1_start_3_0(): return render_template('transfer-and-charge/citizen-1-start-2.0.html', next_page=\"citizen-1-login\") # Step", "a property # starts on GOV.UK and flows into register view # Verify", "property details v2.0 ----------------- @app.route('/find-owner/d/register-view') def find_owner_d_register_view(): return render_template('register-view/register-test-title.html') # --------------------------------------------------------------------------- # Alternate", "transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1') def transfer_agreement_1(): return render_template('legal-documents/transfer-agreement-v1.html') # --------------------------------------------------------------------------- # Reserve Priority", "@app.route('/transfer-and-charge/citizen-1-sign-transfer') def transfer_and_charge_citizen_1_sign_transfer_2_0(): return render_template('transfer-and-charge/citizen-1-sign-transfer-2.0.html', next_page=\"citizen-1-semi-confirmed\") # Step 5 - Client 1 -", "@app.route('/relationship-starts/conveyancer-start') def conveyancer_start_2_2(): return render_template('relationship-starts/conveyancer-start-2.2.html') # Step 1 - log in @app.route('/relationship-starts/login') def", "@app.route('/legal-documents/mortgage-agreement-v1') def mortgage_agreement_1(): return render_template('legal-documents/mortgage-agreement-v1.html') # Page prototypes, Example transfer agreement -------------------------- @app.route('/legal-documents/transfer-agreement-v1')", 
"json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary-withdraw-option.html', editable=False, data=data) # Transfer prototypes, summary", "in @app.route('/relationship-starts/client-experian-sign-in') def relationship_starts_client_verify_experian_sign_in_1(): return render_template('relationship-starts/verify-subflow-client-1/verify-sign-in.html') # GOV.UK verify - Sub flow Step", "next_page=\"/\") # --------------------------------------------------------------------------- # scenario: user wants to find out who owns a", "2nd phase sign in @app.route('/find-owner/c/experian-sign-in-part-2') def find_owner_c_verify_experian_sign_in_2nd_part_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub", "- login with GOV.UK Verify - use sub flow... # Sub flow -", "Step 3 - results and select correct property @app.route('/relationship-starts/conveyancer-select-property') def conveyancer_select_property_2_2(): return render_template('relationship-starts/conveyancer-select-property-2.2.html')", "return render_template('transfer/transfer-signing-seller.html', editable=False, data=data, role=\"seller\") # --------------------------------------------------------------------------- # Transfer prototypes - 2nd conveyancer,", "GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html', next_page=\"/find-owner/c/who-verified-you\")", "def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json',", "Step 2 - Client 1 enters token @app.route('/transfer-and-charge/citizen-1-enter-token') def 
transfer_and_charge_citizen_1_enter_token_2_0(): return render_template('transfer-and-charge/citizen-1-enter-token-2.0.html', next_page=\"citizen-1-sign-mortgage\")", "# GOV.UK verify - Sub flow Step 1 @app.route('/find-owner/c/verify') def find_owner_c_verify(): return render_template('user-find-owner/govuk-verify/verify-intro.html',", "@app.route('/relationship-starts/client-2-confirm') def client_2_confirm_2_2(): return render_template('relationship-starts/client-2-confirm-2.2.html') # Step 10 - Client 2 receives (all", "@app.route('/sprint-3/buyer-signing-start') def sprint_3_buyer_signing_start(): return render_template('sprint-3/deed/buyer-0-start.html') @app.route('/sprint-3/buyer-signing-login') def sprint_3_buyer_signing_login(): return render_template('sprint-3/deed/buyer-0a-login.html') @app.route('/sprint-3/display-charge-for-signing') def sprint_3_execute_deed():", "# If we're having to download a \"legal copy\" then this page can", "+ Payment + real fake title @app.route('/find-owner/d/search') def find_owner_d_search(): return render_template('user-find-owner/search.html', next_page=\"/find-owner/d/results\") #", "transfer_2nd_conveyancer_marked_ready(): return render_template('transfer-2nd-conveyancer/marked-ready.html') # Transfer prototypes, transfer that has been withdrawn @app.route('/transfer-2nd-con/transfer-withdrawn') def", "hackday_land_record_2(): return render_template('hackday/land-record-2.html', next_page=\"404\") # --------------------------------------------------------------------------- # LAST OF THE ALPHA PROTOTYPES! 
#", "flow - GOV.UK Verification --------------------- # GOV.UK pages, property details v2.0 ----------------- @app.route('/find-owner/c/register-view')", "} @app.route('/') def home(): return render_template('index.html') @app.errorhandler(404) def page_not_found(e): return render_template('common/proto-404.html'), 404 @app.route('/404')", "return render_template('user-find-owner/govuk-verify/verify-sign-in-2.html', next_page=\"/find-owner/c/register-view\") # end Sub flow - GOV.UK Verification --------------------- # GOV.UK", "@app.route('/sprint-4/citizen-login') def sprint_4_citizen_login(): return render_template('sprint-4/relationship/citizen-login.html') @app.route('/sprint-4/citizen-confirm') def sprint_4_citizen_confirm(): return render_template('sprint-4/relationship/citizen-confirm.html') @app.route('/sprint-4/citizen-complete') def sprint_4_citizen_complete():", "def transfer_empty_states(): json_data=open('app/static/data/incomplete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/transfer-empty-states.html', editable=True, data=data) # Transfer", "prototypes, mortgage details entered page @app.route('/transfer/mortgage-details-entered') def transfer_mortgage_details_entered(): return render_template('transfer/mortgage-details-entered.html') # Transfer prototypes,", "prototypes, summary page @app.route('/transfer/summary') def transfer_summary(): json_data=open('app/static/data/complete-transfer.json', \"r\") data = json.load(json_data) return render_template('transfer/summary.html',", "\"r\") data = json.load(json_data) return render_template('transfer/transfer-signing.html', next_page=\"/transfer-and-charge-v3/citizen-1-sms\", data=data, role=\"citizen\") # Step 3a -", "verify - Step 3 - experian sign in @app.route('/find-owner/d/experian-sign-in') def find_owner_d_verify_experian_sign_in_1(): return render_template('user-find-owner/govuk-verify/verify-sign-in.html'," ]
[ "keys,values in param.items(): print(keys) print(values) print \"Rejection rate: %.3f +- %.3f (%d /", "test time: %.5f sec\" % average_time os.chdir('..') #Minor: need to change the above", "counter, numTrials) print \"Average test time: %.5f sec\" % average_time os.chdir('..') #Minor: need", "\"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] = load(load_f) load_f.close() rate", "<gh_stars>0 '''Directly copied from MMD case''' from numpy import sqrt import os from", "print \"Parameters:\" for keys,values in param.items(): print(keys) print(values) print \"Rejection rate: %.3f +-", "load(load_f) load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this", "1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder is symmetrical in terms of rate'''", "/ %d)\" % (rate, stder, counter, numTrials) print \"Average test time: %.5f sec\"", "= \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] = load(load_f) load_f.close()", "load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param,", "rate: %.3f +- %.3f (%d / %d)\" % (rate, stder, counter, numTrials) print", "param, average_time, pvalues] = load(load_f) load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate)", "stder, counter, numTrials) print \"Average test time: %.5f sec\" % average_time os.chdir('..') #Minor:", "+- %.3f (%d / %d)\" % (rate, stder, counter, numTrials) print \"Average test", "load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] = load(load_f) load_f.close() rate =", "print(values) print \"Rejection rate: %.3f +- %.3f (%d / %d)\" % (rate, stder,", ") '''this stder is symmetrical in terms of rate''' print \"Parameters:\" for keys,values", "from MMD case''' from numpy import sqrt import os from pickle import load", "import 
sqrt import os from pickle import load import sys os.chdir(sys.argv[1]) load_filename =", "= counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder is symmetrical", "os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] =", "open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] = load(load_f) load_f.close() rate = counter/float(numTrials) stder", "\"Parameters:\" for keys,values in param.items(): print(keys) print(values) print \"Rejection rate: %.3f +- %.3f", "(rate, stder, counter, numTrials) print \"Average test time: %.5f sec\" % average_time os.chdir('..')", "\"Rejection rate: %.3f +- %.3f (%d / %d)\" % (rate, stder, counter, numTrials)", "%.3f +- %.3f (%d / %d)\" % (rate, stder, counter, numTrials) print \"Average", "MMD case''' from numpy import sqrt import os from pickle import load import", "rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder is", "/ float(numTrials) ) '''this stder is symmetrical in terms of rate''' print \"Parameters:\"", "% (rate, stder, counter, numTrials) print \"Average test time: %.5f sec\" % average_time", "sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues]", "rate''' print \"Parameters:\" for keys,values in param.items(): print(keys) print(values) print \"Rejection rate: %.3f", "numTrials) print \"Average test time: %.5f sec\" % average_time os.chdir('..') #Minor: need to", "\"Average test time: %.5f sec\" % average_time os.chdir('..') #Minor: need to change the", "print(keys) print(values) print \"Rejection rate: %.3f +- %.3f (%d / %d)\" % (rate,", "'''Directly copied from MMD case''' from numpy import sqrt import os from pickle", "time: %.5f sec\" % average_time os.chdir('..') #Minor: need to change the above for", "print \"Average test time: 
%.5f sec\" % average_time os.chdir('..') #Minor: need to change", "average_time, pvalues] = load(load_f) load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) /", "numpy import sqrt import os from pickle import load import sys os.chdir(sys.argv[1]) load_filename", "param.items(): print(keys) print(values) print \"Rejection rate: %.3f +- %.3f (%d / %d)\" %", "load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] = load(load_f)", "symmetrical in terms of rate''' print \"Parameters:\" for keys,values in param.items(): print(keys) print(values)", "counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder is symmetrical in", "of rate''' print \"Parameters:\" for keys,values in param.items(): print(keys) print(values) print \"Rejection rate:", "pickle import load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter,", "numTrials, param, average_time, pvalues] = load(load_f) load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt(", "% average_time os.chdir('..') #Minor: need to change the above for Gaussian Kernel Median", "= load(load_f) load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) )", "for keys,values in param.items(): print(keys) print(values) print \"Rejection rate: %.3f +- %.3f (%d", "pvalues] = load(load_f) load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials)", "%.5f sec\" % average_time os.chdir('..') #Minor: need to change the above for Gaussian", "%d)\" % (rate, stder, counter, numTrials) print \"Average test time: %.5f sec\" %", "import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials, param, average_time,", "[counter, numTrials, param, average_time, pvalues] = load(load_f) load_f.close() rate = 
counter/float(numTrials) stder =", "from numpy import sqrt import os from pickle import load import sys os.chdir(sys.argv[1])", "'''this stder is symmetrical in terms of rate''' print \"Parameters:\" for keys,values in", "os from pickle import load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f =", "in param.items(): print(keys) print(values) print \"Rejection rate: %.3f +- %.3f (%d / %d)\"", "float(numTrials) ) '''this stder is symmetrical in terms of rate''' print \"Parameters:\" for", "rate*(1-rate) / float(numTrials) ) '''this stder is symmetrical in terms of rate''' print", "%.3f (%d / %d)\" % (rate, stder, counter, numTrials) print \"Average test time:", "stder is symmetrical in terms of rate''' print \"Parameters:\" for keys,values in param.items():", "load_f.close() rate = counter/float(numTrials) stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder", "is symmetrical in terms of rate''' print \"Parameters:\" for keys,values in param.items(): print(keys)", "= open(load_filename,\"r\") [counter, numTrials, param, average_time, pvalues] = load(load_f) load_f.close() rate = counter/float(numTrials)", "stder = 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder is symmetrical in terms", "(%d / %d)\" % (rate, stder, counter, numTrials) print \"Average test time: %.5f", "average_time os.chdir('..') #Minor: need to change the above for Gaussian Kernel Median Heuristic", "from pickle import load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\")", "sqrt import os from pickle import load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\"", "case''' from numpy import sqrt import os from pickle import load import sys", "terms of rate''' print \"Parameters:\" for keys,values in param.items(): print(keys) print(values) print \"Rejection", "print \"Rejection rate: %.3f +- %.3f (%d / %d)\" % (rate, stder, counter,", "in terms of rate''' print \"Parameters:\" for 
keys,values in param.items(): print(keys) print(values) print", "sec\" % average_time os.chdir('..') #Minor: need to change the above for Gaussian Kernel", "copied from MMD case''' from numpy import sqrt import os from pickle import", "import os from pickle import load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f", "= 1.96*sqrt( rate*(1-rate) / float(numTrials) ) '''this stder is symmetrical in terms of", "import load import sys os.chdir(sys.argv[1]) load_filename = \"results.bin\" load_f = open(load_filename,\"r\") [counter, numTrials," ]
[ "name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article', name='categories', field=models.ManyToManyField(blank=True, through='help.ArticleCategory',", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations =", "('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField(", "Django 4.0.1 on 2022-02-01 17:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "on 2022-02-01 17:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article', name='categories',", "Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article', name='faq',", "model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order',", "= [ ('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False),", "field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', 
field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article', name='categories', field=models.ManyToManyField(blank=True, through='help.ArticleCategory', to='help.Category'),", "class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article',", "migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article', name='categories', field=models.ManyToManyField(blank=True, through='help.ArticleCategory', to='help.Category'), ), ]", "2022-02-01 17:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('help',", "migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory',", "Generated by Django 4.0.1 on 2022-02-01 17:49 from django.db import migrations, models class", "'0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article',", "17:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'),", "dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True,", "= [ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ),", "] operations = [ 
migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order',", "name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000),", "operations = [ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000),", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations", "migrations, models class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations = [", "), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article',", "4.0.1 on 2022-02-01 17:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ]", "by Django 4.0.1 on 2022-02-01 17:49 from django.db import migrations, models class Migration(migrations.Migration):", "<filename>help/migrations/0004_article_faq_article_order_articlecategory_order_and_more.py # Generated by Django 4.0.1 on 2022-02-01 17:49 from django.db import migrations,", "[ ('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField( model_name='article', name='faq', 
field=models.BooleanField(db_index=True, default=False), ),", "model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article', name='categories', field=models.ManyToManyField(blank=True,", "), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField( model_name='article', name='categories', field=models.ManyToManyField(blank=True, through='help.ArticleCategory', to='help.Category'), ),", "[ migrations.AddField( model_name='article', name='faq', field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField(", "# Generated by Django 4.0.1 on 2022-02-01 17:49 from django.db import migrations, models", "default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AlterField(", "models class Migration(migrations.Migration): dependencies = [ ('help', '0003_article_categories_articlecategory'), ] operations = [ migrations.AddField(", "field=models.BooleanField(db_index=True, default=False), ), migrations.AddField( model_name='article', name='order', field=models.PositiveIntegerField(default=1000), ), migrations.AddField( model_name='articlecategory', name='order', field=models.PositiveIntegerField(default=1000), )," ]
[ "in Fahrenheit): \") faren = float(faren) # The calculation on the right gets", "cel = 5./9. * (faren - 32.) print \"The temperature in Celcius is", "float(faren) # The calculation on the right gets saved to the variable on", "the temperature in Fahrenheit): \") faren = float(faren) # The calculation on the", "- 32.) print \"The temperature in Celcius is \" + str(cel) + \"", "calculation on the right gets saved to the variable on the left cel", "saved to the variable on the left cel = 5./9. * (faren -", "5./9. * (faren - 32.) print \"The temperature in Celcius is \" +", "temperature in Fahrenheit): \") faren = float(faren) # The calculation on the right", "on the left cel = 5./9. * (faren - 32.) print \"The temperature", "faren = float(faren) # The calculation on the right gets saved to the", "the left cel = 5./9. * (faren - 32.) print \"The temperature in", "= 5./9. * (faren - 32.) print \"The temperature in Celcius is \"", "faren = raw_input(\"Enter the temperature in Fahrenheit): \") faren = float(faren) # The", "* (faren - 32.) print \"The temperature in Celcius is \" + str(cel)", "right gets saved to the variable on the left cel = 5./9. *", "# The calculation on the right gets saved to the variable on the", "to the variable on the left cel = 5./9. * (faren - 32.)", "variable on the left cel = 5./9. * (faren - 32.) print \"The", "the variable on the left cel = 5./9. * (faren - 32.) print", "= float(faren) # The calculation on the right gets saved to the variable", "on the right gets saved to the variable on the left cel =", "\") faren = float(faren) # The calculation on the right gets saved to", "Fahrenheit): \") faren = float(faren) # The calculation on the right gets saved", "left cel = 5./9. * (faren - 32.) 
print \"The temperature in Celcius", "raw_input(\"Enter the temperature in Fahrenheit): \") faren = float(faren) # The calculation on", "<reponame>saudijack/unfpyboot<gh_stars>0 faren = raw_input(\"Enter the temperature in Fahrenheit): \") faren = float(faren) #", "The calculation on the right gets saved to the variable on the left", "the right gets saved to the variable on the left cel = 5./9.", "32.) print \"The temperature in Celcius is \" + str(cel) + \" degrees.\"", "gets saved to the variable on the left cel = 5./9. * (faren", "= raw_input(\"Enter the temperature in Fahrenheit): \") faren = float(faren) # The calculation", "(faren - 32.) print \"The temperature in Celcius is \" + str(cel) +" ]
[ "path from .views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns = [ path('run',", "CallbackSubmission urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>', CheckRunStatus.as_view()), path('callback/run/<int:sub_id>', CallbackRunNow.as_view()), path('callback/submit/<int:verdict_id>', CallbackSubmission.as_view()) ]", "from .views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns = [ path('run', Run.as_view()),", "\\ CallbackSubmission urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>', CheckRunStatus.as_view()), path('callback/run/<int:sub_id>', CallbackRunNow.as_view()), path('callback/submit/<int:verdict_id>', CallbackSubmission.as_view())", "from django.urls import path from .views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns", "<reponame>PICT-ACM-Student-Chapter/OJ_API<filename>app/submission/urls.py from django.urls import path from .views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission", "Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>', CheckRunStatus.as_view()), path('callback/run/<int:sub_id>',", "import path from .views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns = [", "CallbackRunNow, \\ CallbackSubmission urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>', CheckRunStatus.as_view()), path('callback/run/<int:sub_id>', CallbackRunNow.as_view()), path('callback/submit/<int:verdict_id>',", "CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>', CheckRunStatus.as_view()), path('callback/run/<int:sub_id>', CallbackRunNow.as_view()),", ".views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>',", "import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission 
urlpatterns = [ path('run', Run.as_view()), path('run/<int:id>', CheckRunStatus.as_view()),", "django.urls import path from .views import Run, CheckRunStatus, CallbackRunNow, \\ CallbackSubmission urlpatterns =" ]
[ "django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations =", "설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')),", "on 2021-06-24 15:23 from django.conf import settings from django.db import migrations, models import", "않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가", "된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={", "class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [", "수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL,", "[ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board', fields=[ ('id',", "models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')),", "좋아요 수를 
나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')),", "primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가", "설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. MVP 단계에서 사용되지 않습니다.',", "help_text='유료 보드인지를 나타냅니다. MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요", "15:23 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class", "upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료", "verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를", "사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count',", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL),", "verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. 
MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정", "설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image',", "<reponame>jaethewiederholen/Poolink_backend<gh_stars>0 # Generated by Django 3.1.12 on 2021-06-24 15:23 from django.conf import settings", "수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name': '보드', 'verbose_name_plural': '보드',", "verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩", "이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다.", "operations = [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는", "verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards',", "MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요", "보드인지를 나타냅니다. 
MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를", "django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial", "dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True,", "('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다',", "나타냅니다. MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.',", "3.1.12 on 2021-06-24 15:23 from django.conf import settings from django.db import migrations, models", "보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='',", "name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255,", "('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name': '보드', 'verbose_name_plural': '보드', },", "models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩", "by Django 3.1.12 on 2021-06-24 15:23 from django.conf import settings from django.db import", "대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. 
MVP 단계에서 사용되지", "Django 3.1.12 on 2021-06-24 15:23 from django.conf import settings from django.db import migrations,", "related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name': '보드', 'verbose_name_plural': '보드', }, ), ]", "('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드", "이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies", "설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된", "] operations = [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name',", "= True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board', fields=[", "스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ],", "Generated by Django 3.1.12 on 2021-06-24 15:23 from django.conf import settings from django.db", "이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')),", "여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를", "import migrations, models import django.db.models.deletion class 
Migration(migrations.Migration): initial = True dependencies = [", "models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True,", "import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations", "help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드", "models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True,", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies =", "수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name':", "models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ]", "# Generated by Django 3.1.12 on 2021-06-24 15:23 from django.conf import settings from", "import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial =", "보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')),", "migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다',", "[ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의", "null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드", "Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel(", "('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count',", "수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user',", "serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한", "나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name': '보드',", "단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')),", "max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio',", "('like_count', models.IntegerField(help_text='보드의 좋아요 수를 나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.',", "from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge',", "('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), 
('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.',", "models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. MVP 단계에서 사용되지 않습니다.', verbose_name='유료보드 설정 여부')), ('like_count', models.IntegerField(help_text='보드의", "initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board',", "verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드 이미지입니다.', null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에", "나타냅니다.', verbose_name='좋아요 수')), ('scrap_count', models.IntegerField(help_text='보드가 스크랩 된 수를 나타냅니다.', verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')),", "= [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드',", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드", "null=True, upload_to='', verbose_name='보드 이미지')), ('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False,", "('bio', models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. 
MVP", "= [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Board', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "verbose_name='ID')), ('name', models.CharField(default='이름없는 보드', help_text='보드의 이름입니다', max_length=255, verbose_name='보드 이름')), ('image', models.ImageField(help_text='유저가 설정한 보드", "settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True", "verbose_name='스크랩 수')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name': '보드', 'verbose_name_plural':", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='boards', to=settings.AUTH_USER_MODEL, verbose_name='보드 소유자')), ], options={ 'verbose_name': '보드', 'verbose_name_plural': '보드', }, ),", "models.TextField(help_text='보드에 대한 설명입니다', null=True, verbose_name='보드 설명')), ('charge', models.BooleanField(default=False, help_text='유료 보드인지를 나타냅니다. MVP 단계에서", "2021-06-24 15:23 from django.conf import settings from django.db import migrations, models import django.db.models.deletion" ]
[ "return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a very inefficient way.\"\"\"", "the model. :param domain: Default domain of each variable. \"\"\" self.num_variables = num_variables", "get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for", "them. \"\"\" x = np.array(x) assert x.shape == (self.num_variables,) result = 1.0 for", "fact_labels = [f.get_name() for f in factors] labels = var_labels + fact_labels labels", "License, Version 2.0 - see LICENSE file. from __future__ import annotations import abc", "the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return self def __len__(self): return self.num_variables", "abc import itertools from typing import TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx", "v = Variable(self, idx, self._default_domain) self._vars[idx] = v return self._vars[idx] def get_variables(self) ->", "vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns", "x in itertools.product( *(v.domain.values for v in model.get_variables())): prob = model.evaluate(np.array(x)) if prob", "graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph, labels = self.get_factor_graph()", "part and factors in other graph. Edge denotes that factor depends on variable.", "for x in itertools.product( *(v.domain.values for v in model.get_variables())): prob = model.evaluate(np.array(x)) if", "variables in the model. :param domain: Default domain of each variable. \"\"\" self.num_variables", "the model. 
Factor graph is a bipartite graph with variables in one part", "nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self,", "for i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables +", "\"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self, idx: int)", ":param domain: Default domain of each variable. \"\"\" self.num_variables = num_variables self._default_domain =", "import itertools from typing import TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx as", "self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition function in very", "very inefficient way.\"\"\" best_state = None best_prob = 0.0 for x in itertools.product(", "of trivial ``FunctionFactor`` s, each of them representing a factor on one variable", "+ fact_labels labels = {i: labels[i] for i in range(len(labels))} graph = nx.Graph()", "inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most probable state.\"\"\"", "None best_prob = 0.0 for x in itertools.product( *(v.domain.values for v in model.get_variables())):", "class representing any graphical model.\"\"\" def __init__(self, num_variables: int, domain: Domain): \"\"\" :param", "model.get_variables())): prob = model.evaluate(np.array(x)) if prob >= best_prob: best_state = x best_prob =", "= self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos =", "from typing import TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx as nx import", "part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient way.\"\"\" part_func = 0 
for x", "np.array(x) assert x.shape == (self.num_variables,) result = 1.0 for factor in self.get_factors(): result", "probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod", "values into factors and multiplies them. \"\"\" x = np.array(x) assert x.shape ==", "FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base import Domain, Factor class", "= dict() def get_variable(self, idx: int) -> Variable: \"\"\"Returns variable by its index.\"\"\"", "state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def", "\"\"\"Builds factor graph for the model. Factor graph is a bipartite graph with", "Draw factors in another color. nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)),", "= x best_prob = prob return best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain", "``FunctionFactor`` s, each of them representing a factor on one variable with identity", "return self def __len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\"", "lambda x: x[0]) for i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]:", "index.\"\"\" if not 0 <= idx < self.num_variables: raise IndexError( \"index %d is", "model.evaluate(np.array(x)) if prob >= best_prob: best_state = x best_prob = prob return best_state", "most probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\"", "used in mathematical expressions, which will result in another ``FunctionFactor``. 
\"\"\" return [FunctionFactor(self,", "in self.get_variables()] fact_labels = [f.get_name() for f in factors] labels = var_labels +", "the Apache License, Version 2.0 - see LICENSE file. from __future__ import annotations", "get_variable(self, idx: int) -> Variable: \"\"\"Returns variable by its index.\"\"\" if not 0", "rights reserved. # Licensed under the Apache License, Version 2.0 - see LICENSE", "List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions. Returns lists of trivial ``FunctionFactor`` s,", "for usage in expressions. Returns lists of trivial ``FunctionFactor`` s, each of them", "for v in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most", "get_max_domain_size(self): \"\"\"Returns the biggest domain size over all variables.\"\"\" return max([var.domain.size() for var", "function. They can be used in mathematical expressions, which will result in another", "idx not in self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx] = v return", "Tuple, Dict, List import networkx as nx import numpy as np from inferlo.base.factors", "inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\" def", "algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all", "all variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self, idx: int) ->", "the most probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates", "[FunctionFactor(self, [i], lambda x: x[0]) for i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph,", "Licensed under the Apache License, Version 2.0 - see LICENSE file. from __future__", "variable. 
\"\"\" self.num_variables = num_variables self._default_domain = domain self._vars = dict() def get_variable(self,", "in another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda x: x[0]) for i in", "\"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most probable", "Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\" def __init__(self, num_variables: int,", "= [f.get_name() for f in factors] labels = var_labels + fact_labels labels =", "mathematical expressions, which will result in another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda", "return best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain size over all variables.\"\"\" return", "<= idx < self.num_variables: raise IndexError( \"index %d is out of bounds for", "factor graph.\"\"\" graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc", "inefficient way.\"\"\" part_func = 0 for x in itertools.product( *(v.domain.values for v in", "[v.name for v in self.get_variables()] fact_labels = [f.get_name() for f in factors] labels", "for v in model.get_variables())): prob = model.evaluate(np.array(x)) if prob >= best_prob: best_state =", "Variable: \"\"\"Returns variable by its index.\"\"\" if not 0 <= idx < self.num_variables:", "that factor depends on variable. \"\"\" factors = list(self.get_factors()) var_labels = [v.name for", ">= best_prob: best_state = x best_prob = prob return best_state def get_max_domain_size(self): \"\"\"Returns", "bipartite graph with variables in one part and factors in other graph. Edge", "a factor on one variable with identity function. 
They can be used in", "Dict, List import networkx as nx import numpy as np from inferlo.base.factors import", "self def __len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod", "idx: int) -> Variable: \"\"\"Returns variable by its index.\"\"\" if not 0 <=", "a bipartite graph with variables in one part and factors in other graph.", "random vector of size %d\" % ( idx, self.num_variables)) if idx not in", "function in very inefficient way.\"\"\" part_func = 0 for x in itertools.product( *(v.domain.values", "is a bipartite graph with variables in one part and factors in other", "as np from inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING: from", "node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another color. nx.draw_networkx(graph, pos, ax, labels=labels,", "result in another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda x: x[0]) for i", "Apache License, Version 2.0 - see LICENSE file. from __future__ import annotations import", "The InferLO authors. All rights reserved. # Licensed under the Apache License, Version", "most likely state in a very inefficient way.\"\"\" best_state = None best_prob =", "of each variable. \"\"\" self.num_variables = num_variables self._default_domain = domain self._vars = dict()", "TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx as nx import numpy as np", "domain of each variable. \"\"\" self.num_variables = num_variables self._default_domain = domain self._vars =", "Version 2.0 - see LICENSE file. from __future__ import annotations import abc import", "raise IndexError( \"index %d is out of bounds for random vector of size", "node_color='#ffaaaa') # Draw factors in another color. 
nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc", "-> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for the model. Factor graph is", "variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self, idx: int) -> Variable:", "Variable(self, idx, self._default_domain) self._vars[idx] = v return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns", "LICENSE file. from __future__ import annotations import abc import itertools from typing import", "int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns", "= 0.0 for x in itertools.product( *(v.domain.values for v in model.get_variables())): prob =", "and factors in other graph. Edge denotes that factor depends on variable. \"\"\"", "var_labels = [v.name for v in self.get_variables()] fact_labels = [f.get_name() for f in", "*(v.domain.values for v in model.get_variables())): prob = model.evaluate(np.array(x)) if prob >= best_prob: best_state", "draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0]", "def __len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def", "\"\"\"Returns value of non-normalized pdf in point. In other words, just substitutes values", "np.ndarray: \"\"\"Finds the most probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs) ->", "- see LICENSE file. from __future__ import annotations import abc import itertools from", "on variable. 
\"\"\" factors = list(self.get_factors()) var_labels = [v.name for v in self.get_variables()]", "from inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base import", "= model.evaluate(np.array(x)) if prob >= best_prob: best_state = x best_prob = prob return", "best_prob: best_state = x best_prob = prob return best_state def get_max_domain_size(self): \"\"\"Returns the", "in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws", "i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)),", "with variables in one part and factors in other graph. Edge denotes that", "if not 0 <= idx < self.num_variables: raise IndexError( \"index %d is out", "int) -> Variable: \"\"\"Returns variable by its index.\"\"\" if not 0 <= idx", "file. from __future__ import annotations import abc import itertools from typing import TYPE_CHECKING,", "return [self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self, idx: int) -> Variable: return", "nx import numpy as np from inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable", "s, each of them representing a factor on one variable with identity function.", "= self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels,", "in itertools.product( *(v.domain.values for v in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def", "variable. \"\"\" factors = list(self.get_factors()) var_labels = [v.name for v in self.get_variables()] fact_labels", "substitutes values into factors and multiplies them. 
\"\"\" x = np.array(x) assert x.shape", "Domain): \"\"\" :param num_variables: Number of variables in the model. :param domain: Default", "for x in itertools.product( *(v.domain.values for v in model.get_variables())): part_func += model.evaluate(np.array(x)) return", "likely state in a very inefficient way.\"\"\" best_state = None best_prob = 0.0", "nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another color. nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc,", "\"\"\"Draws the factor graph.\"\"\" graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc =", "in expressions. Returns lists of trivial ``FunctionFactor`` s, each of them representing a", "\"\"\"Finds the most probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray:", "\"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) ->", "self.add_factor(other) return self def __len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs", "GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\" def __init__(self, num_variables: int, domain: Domain):", "typing import TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx as nx import numpy", "\"\"\"Returns the biggest domain size over all variables.\"\"\" return max([var.domain.size() for var in", "self.num_variables + len(factors)), bipartite=1) for factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id,", "<filename>inferlo/base/graph_model.py<gh_stars>1-10 # Copyright (c) 2020, The InferLO authors. All rights reserved. # Licensed", "0 <= idx < self.num_variables: raise IndexError( \"index %d is out of bounds", "and multiplies them. 
\"\"\" x = np.array(x) assert x.shape == (self.num_variables,) result =", "f in factors] labels = var_labels + fact_labels labels = {i: labels[i] for", "annotations import abc import itertools from typing import TYPE_CHECKING, Iterable, Tuple, Dict, List", "str]]: \"\"\"Builds factor graph for the model. Factor graph is a bipartite graph", "for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels def draw_factor_graph(self,", "\"\"\" return [FunctionFactor(self, [i], lambda x: x[0]) for i in range(self.num_variables)] def get_factor_graph(self)", "self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph,", "[f.get_name() for f in factors] labels = var_labels + fact_labels labels = {i:", "range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for", ":param num_variables: Number of variables in the model. :param domain: Default domain of", "self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i in", "part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a", "2020, The InferLO authors. All rights reserved. 
# Licensed under the Apache License,", "factor to the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return self def __len__(self):", "-> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage", "model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in", "nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float:", "fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)),", "inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract", "= 0 for x in itertools.product( *(v.domain.values for v in model.get_variables())): part_func +=", "very inefficient way.\"\"\" part_func = 0 for x in itertools.product( *(v.domain.values for v", "if TYPE_CHECKING: from inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any", "self._default_domain) self._vars[idx] = v return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\"", "graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1])", "evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value of non-normalized pdf in point. 
In", "vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax,", "List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self, idx:", "inefficient way.\"\"\" best_state = None best_prob = 0.0 for x in itertools.product( *(v.domain.values", "# Copyright (c) 2020, The InferLO authors. All rights reserved. # Licensed under", "**kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most", "Factor): self.add_factor(other) return self def __len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs):", "fact_labels labels = {i: labels[i] for i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables),", "nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos,", "in itertools.product( *(v.domain.values for v in model.get_variables())): prob = model.evaluate(np.array(x)) if prob >=", "multiplies them. 
\"\"\" x = np.array(x) assert x.shape == (self.num_variables,) result = 1.0", "any graphical model.\"\"\" def __init__(self, num_variables: int, domain: Domain): \"\"\" :param num_variables: Number", "+ len(factors)), bipartite=1) for factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables", "add_factor(self, factor: Factor): \"\"\"Adds a factor to the model.\"\"\" def __imul__(self, other: Factor):", "Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage in", "List import networkx as nx import numpy as np from inferlo.base.factors import FunctionFactor", "v return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for", "def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)]", "in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1)", "graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the factor", "state in a very inefficient way.\"\"\" best_state = None best_prob = 0.0 for", "graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id", "+= model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a very", "domain self._vars = dict() def get_variable(self, idx: int) -> Variable: \"\"\"Returns variable by", "factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions. 
Returns lists", "(self.num_variables,) result = 1.0 for factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result", "pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another color. nx.draw_networkx(graph,", "domain: Default domain of each variable. \"\"\" self.num_variables = num_variables self._default_domain = domain", "of bounds for random vector of size %d\" % ( idx, self.num_variables)) if", "Returns lists of trivial ``FunctionFactor`` s, each of them representing a factor on", "self._vars[idx] = v return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return", "part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a very inefficient way.\"\"\" best_state", "not in self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx] = v return self._vars[idx]", "``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda x: x[0]) for i in range(self.num_variables)] def", "-> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def", "factors = list(self.get_factors()) var_labels = [v.name for v in self.get_variables()] fact_labels = [f.get_name()", "self.get_variables()] fact_labels = [f.get_name() for f in factors] labels = var_labels + fact_labels", "in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state", "0 for x in itertools.product( *(v.domain.values for v in model.get_variables())): part_func += model.evaluate(np.array(x))", "labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another color. 
nx.draw_networkx(graph, pos, ax,", "bounds for random vector of size %d\" % ( idx, self.num_variables)) if idx", "( idx, self.num_variables)) if idx not in self._vars: v = Variable(self, idx, self._default_domain)", "\"\"\"Evaluates most likely state in a very inefficient way.\"\"\" best_state = None best_prob", "size %d\" % ( idx, self.num_variables)) if idx not in self._vars: v =", "result def part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient way.\"\"\" part_func = 0", "InferLO authors. All rights reserved. # Licensed under the Apache License, Version 2.0", "labels = var_labels + fact_labels labels = {i: labels[i] for i in range(len(labels))}", "def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph, labels = self.get_factor_graph() top =", "top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top)", "other: Factor): self.add_factor(other) return self def __len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto',", "*= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient way.\"\"\"", "each variable. 
\"\"\" self.num_variables = num_variables self._default_domain = domain self._vars = dict() def", "v in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely", "top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another", "\"\"\" self.num_variables = num_variables self._default_domain = domain self._vars = dict() def get_variable(self, idx:", "__imul__(self, other: Factor): self.add_factor(other) return self def __len__(self): return self.num_variables @abc.abstractmethod def infer(self,", "if idx not in self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx] = v", "self.num_variables = num_variables self._default_domain = domain self._vars = dict() def get_variable(self, idx: int)", "trivial ``FunctionFactor`` s, each of them representing a factor on one variable with", "graph with variables in one part and factors in other graph. Edge denotes", "another color. 
nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen')", "@abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares", "labels def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph, labels = self.get_factor_graph() top", "@abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a factor to the model.\"\"\" def __imul__(self,", "self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o',", "result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient", "prob return best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain size over all variables.\"\"\"", "return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto',", "x.shape == (self.num_variables,) result = 1.0 for factor in self.get_factors(): result *= factor.value(x[factor.var_idx])", "Variable if TYPE_CHECKING: from inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing", "factor_id) return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph, labels", "range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for the model.", "best_prob = 0.0 for x in itertools.product( *(v.domain.values for v in model.get_variables())): prob", "words, just substitutes values into factors and multiplies them. 
\"\"\" x = np.array(x)", "fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value of", "numpy as np from inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING:", "= domain self._vars = dict() def get_variable(self, idx: int) -> Variable: \"\"\"Returns variable", "Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for the model. Factor graph is a", "best_state = x best_prob = prob return best_state def get_max_domain_size(self): \"\"\"Returns the biggest", "factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition function", "x: x[0]) for i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds", "Copyright (c) 2020, The InferLO authors. All rights reserved. # Licensed under the", "graphical model.\"\"\" def __init__(self, num_variables: int, domain: Domain): \"\"\" :param num_variables: Number of", "num_variables: int, domain: Domain): \"\"\" :param num_variables: Number of variables in the model.", "for the model. Factor graph is a bipartite graph with variables in one", "factors in other graph. Edge denotes that factor depends on variable. \"\"\" factors", "import numpy as np from inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable if", "self.num_variables + factor_id) return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\"", "itertools from typing import TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx as nx", "\"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions.", "model. :param domain: Default domain of each variable. 
\"\"\" self.num_variables = num_variables self._default_domain", "node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value of non-normalized pdf in", "all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions. Returns", "ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another color. nx.draw_networkx(graph, pos,", "in another color. nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[],", "partition function in very inefficient way.\"\"\" part_func = 0 for x in itertools.product(", "way.\"\"\" part_func = 0 for x in itertools.product( *(v.domain.values for v in model.get_variables())):", "x[0]) for i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor", "\"index %d is out of bounds for random vector of size %d\" %", "v in model.get_variables())): prob = model.evaluate(np.array(x)) if prob >= best_prob: best_state = x", "\"\"\"Prepares variables for usage in expressions. 
Returns lists of trivial ``FunctionFactor`` s, each", "lists of trivial ``FunctionFactor`` s, each of them representing a factor on one", "in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for the", "representing any graphical model.\"\"\" def __init__(self, num_variables: int, domain: Domain): \"\"\" :param num_variables:", "best_prob = prob return best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain size over", "idx, self._default_domain) self._vars[idx] = v return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all", "return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i", "import FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base import Domain, Factor", "Iterable, Tuple, Dict, List import networkx as nx import numpy as np from", "__init__(self, num_variables: int, domain: Domain): \"\"\" :param num_variables: Number of variables in the", "-> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self,", "= nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors", "= None best_prob = 0.0 for x in itertools.product( *(v.domain.values for v in", "a very inefficient way.\"\"\" best_state = None best_prob = 0.0 for x in", "float: \"\"\"Returns value of non-normalized pdf in point. In other words, just substitutes", "see LICENSE file. 
from __future__ import annotations import abc import itertools from typing", "-> np.ndarray: \"\"\"Finds the most probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto', **kwargs)", "They can be used in mathematical expressions, which will result in another ``FunctionFactor``.", "def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions. Returns lists of", "i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for", "def part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient way.\"\"\" part_func = 0 for", "num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]:", "infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds", "*(v.domain.values for v in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates", "= v return self._vars[idx] def get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i)", "In other words, just substitutes values into factors and multiplies them. \"\"\" x", "algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the", "return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a factor to the model.\"\"\"", "# Licensed under the Apache License, Version 2.0 - see LICENSE file. 
from", "factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the", "for v in self.get_variables()] fact_labels = [f.get_name() for f in factors] labels =", "one part and factors in other graph. Edge denotes that factor depends on", "will result in another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda x: x[0]) for", "in a very inefficient way.\"\"\" best_state = None best_prob = 0.0 for x", "import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\" def __init__(self,", "import annotations import abc import itertools from typing import TYPE_CHECKING, Iterable, Tuple, Dict,", "factor graph for the model. Factor graph is a bipartite graph with variables", "itertools.product( *(v.domain.values for v in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model):", "result = 1.0 for factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def", "identity function. They can be used in mathematical expressions, which will result in", "self._vars = dict() def get_variable(self, idx: int) -> Variable: \"\"\"Returns variable by its", "part_func = 0 for x in itertools.product( *(v.domain.values for v in model.get_variables())): part_func", "algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most probable state.\"\"\" def sample(self, num_samples: int,", "usage in expressions. 
Returns lists of trivial ``FunctionFactor`` s, each of them representing", "int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a factor", "a factor to the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return self def", "def __imul__(self, other: Factor): self.add_factor(other) return self def __len__(self): return self.num_variables @abc.abstractmethod def", "dict() def get_variable(self, idx: int) -> Variable: \"\"\"Returns variable by its index.\"\"\" if", "All rights reserved. # Licensed under the Apache License, Version 2.0 - see", "\"\"\"Evaluates partition function in very inefficient way.\"\"\" part_func = 0 for x in", "get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for the model. Factor graph", "bipartite=1) for factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id)", "best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain size over all variables.\"\"\" return max([var.domain.size()", "def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph for the model. Factor", "< self.num_variables: raise IndexError( \"index %d is out of bounds for random vector", "def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables", "inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base import Domain,", "range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx:", "pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x:", "model. 
Factor graph is a bipartite graph with variables in one part and", "%d is out of bounds for random vector of size %d\" % (", "-> float: \"\"\"Returns value of non-normalized pdf in point. In other words, just", "def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray:", "len(factors)), bipartite=1) for factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables +", "\"\"\" x = np.array(x) assert x.shape == (self.num_variables,) result = 1.0 for factor", "assert x.shape == (self.num_variables,) result = 1.0 for factor in self.get_factors(): result *=", "labels[i] for i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables", "in self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx] = v return self._vars[idx] def", "var_labels + fact_labels labels = {i: labels[i] for i in range(len(labels))} graph =", "get_symbolic_variables(self) -> List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions. Returns lists of trivial", "labels = {i: labels[i] for i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0)", "into factors and multiplies them. 
\"\"\" x = np.array(x) assert x.shape == (self.num_variables,)", "is out of bounds for random vector of size %d\" % ( idx,", "from inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\"", "= 1.0 for factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model):", "the factor graph.\"\"\" graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables", "def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value of non-normalized pdf in point.", "\"\"\" :param num_variables: Number of variables in the model. :param domain: Default domain", "for factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition", "list(self.get_factors()) var_labels = [v.name for v in self.get_variables()] fact_labels = [f.get_name() for f", "= nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph,", "factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph,", "= np.array(x) assert x.shape == (self.num_variables,) result = 1.0 for factor in self.get_factors():", "= [v.name for v in self.get_variables()] fact_labels = [f.get_name() for f in factors]", "factors in another color. 
nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s',", "def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most probable state.\"\"\" def sample(self,", "def get_variable(self, idx: int) -> Variable: \"\"\"Returns variable by its index.\"\"\" if not", "labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) ->", "prob >= best_prob: best_state = x best_prob = prob return best_state def get_max_domain_size(self):", "0.0 for x in itertools.product( *(v.domain.values for v in model.get_variables())): prob = model.evaluate(np.array(x))", "import TYPE_CHECKING, Iterable, Tuple, Dict, List import networkx as nx import numpy as", "num_variables self._default_domain = domain self._vars = dict() def get_variable(self, idx: int) -> Variable:", "= Variable(self, idx, self._default_domain) self._vars[idx] = v return self._vars[idx] def get_variables(self) -> List[Variable]:", "def add_factor(self, factor: Factor): \"\"\"Adds a factor to the model.\"\"\" def __imul__(self, other:", "reserved. # Licensed under the Apache License, Version 2.0 - see LICENSE file.", "= len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa')", "def __getitem__(self, idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor):", "factors and multiplies them. 
\"\"\" x = np.array(x) assert x.shape == (self.num_variables,) result", "self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a factor to the model.\"\"\" def", "= var_labels + fact_labels labels = {i: labels[i] for i in range(len(labels))} graph", "def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a very inefficient way.\"\"\" best_state =", "max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most probable state.\"\"\" def sample(self, num_samples:", "in the model. :param domain: Default domain of each variable. \"\"\" self.num_variables =", "self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs)", "denotes that factor depends on variable. \"\"\" factors = list(self.get_factors()) var_labels = [v.name", "{i: labels[i] for i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables,", "way.\"\"\" best_state = None best_prob = 0.0 for x in itertools.product( *(v.domain.values for", "graph is a bipartite graph with variables in one part and factors in", "be used in mathematical expressions, which will result in another ``FunctionFactor``. \"\"\" return", "**kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\"", "with identity function. They can be used in mathematical expressions, which will result", "= list(self.get_factors()) var_labels = [v.name for v in self.get_variables()] fact_labels = [f.get_name() for", "class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\" def __init__(self, num_variables: int, domain:", "graph for the model. 
Factor graph is a bipartite graph with variables in", "in model.get_variables())): prob = model.evaluate(np.array(x)) if prob >= best_prob: best_state = x best_prob", "max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a very inefficient way.\"\"\" best_state = None", "of variables in the model. :param domain: Default domain of each variable. \"\"\"", "vector of size %d\" % ( idx, self.num_variables)) if idx not in self._vars:", "x = np.array(x) assert x.shape == (self.num_variables,) result = 1.0 for factor in", "on one variable with identity function. They can be used in mathematical expressions,", "node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value of non-normalized", "networkx as nx import numpy as np from inferlo.base.factors import FunctionFactor from inferlo.base.variable", "\"\"\"Abstract class representing any graphical model.\"\"\" def __init__(self, num_variables: int, domain: Domain): \"\"\"", "x: np.ndarray) -> float: \"\"\"Returns value of non-normalized pdf in point. In other", "def get_max_domain_size(self): \"\"\"Returns the biggest domain size over all variables.\"\"\" return max([var.domain.size() for", "non-normalized pdf in point. In other words, just substitutes values into factors and", "of size %d\" % ( idx, self.num_variables)) if idx not in self._vars: v", "-> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a factor to", "-> List[FunctionFactor]: \"\"\"Prepares variables for usage in expressions. Returns lists of trivial ``FunctionFactor``", "in very inefficient way.\"\"\" part_func = 0 for x in itertools.product( *(v.domain.values for", "np from inferlo.base.factors import FunctionFactor from inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base", "self._default_domain = domain self._vars = dict() def get_variable(self, idx: int) -> Variable: \"\"\"Returns", "point. 
In other words, just substitutes values into factors and multiplies them. \"\"\"", "graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id in range(len(factors)): for var_id in", "in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels", "can be used in mathematical expressions, which will result in another ``FunctionFactor``. \"\"\"", "idx, self.num_variables)) if idx not in self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx]", "Edge denotes that factor depends on variable. \"\"\" factors = list(self.get_factors()) var_labels =", "in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition function in", "as nx import numpy as np from inferlo.base.factors import FunctionFactor from inferlo.base.variable import", "self.num_variables)) if idx not in self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx] =", "self._vars: v = Variable(self, idx, self._default_domain) self._vars[idx] = v return self._vars[idx] def get_variables(self)", "np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self)", "one variable with identity function. 
They can be used in mathematical expressions, which", "import Variable if TYPE_CHECKING: from inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class", "__getitem__(self, idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds", "= num_variables self._default_domain = domain self._vars = dict() def get_variable(self, idx: int) ->", "Factor): \"\"\"Adds a factor to the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return", "range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels def", "prob = model.evaluate(np.array(x)) if prob >= best_prob: best_state = x best_prob = prob", "\"\"\"Returns variable by its index.\"\"\" if not 0 <= idx < self.num_variables: raise", "color. nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def", "def sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self)", "value of non-normalized pdf in point. In other words, just substitutes values into", "= nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id in", "best_state = None best_prob = 0.0 for x in itertools.product( *(v.domain.values for v", "idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a", "authors. All rights reserved. 
# Licensed under the Apache License, Version 2.0 -", "Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical model.\"\"\" def __init__(self, num_variables:", "for i in range(self.num_variables)] def __getitem__(self, idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod", "return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph, labels =", "@abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) ->", "idx < self.num_variables: raise IndexError( \"index %d is out of bounds for random", "labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc = len(nx.bipartite.sets(graph)[1]) pos", "return result def part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient way.\"\"\" part_func =", "by its index.\"\"\" if not 0 <= idx < self.num_variables: raise IndexError( \"index", "[i], lambda x: x[0]) for i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int,", "variable with identity function. They can be used in mathematical expressions, which will", "in mathematical expressions, which will result in another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i],", "var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return graph, labels def draw_factor_graph(self, ax):", "expressions, which will result in another ``FunctionFactor``. 
\"\"\" return [FunctionFactor(self, [i], lambda x:", "__future__ import annotations import abc import itertools from typing import TYPE_CHECKING, Iterable, Tuple,", "TYPE_CHECKING: from inferlo.base import Domain, Factor class GraphModel(abc.ABC): \"\"\"Abstract class representing any graphical", "for random vector of size %d\" % ( idx, self.num_variables)) if idx not", "1.0 for factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates", "x best_prob = prob return best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain size", "== (self.num_variables,) result = 1.0 for factor in self.get_factors(): result *= factor.value(x[factor.var_idx]) return", "edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value of non-normalized pdf", "in other graph. Edge denotes that factor depends on variable. \"\"\" factors =", "= prob return best_state def get_max_domain_size(self): \"\"\"Returns the biggest domain size over all", "% ( idx, self.num_variables)) if idx not in self._vars: v = Variable(self, idx,", "representing a factor on one variable with identity function. They can be used", "which will result in another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda x: x[0])", "factors] labels = var_labels + fact_labels labels = {i: labels[i] for i in", "[self.get_variable(i) for i in range(self.num_variables)] def __getitem__(self, idx: int) -> Variable: return self.get_variable(idx)", "= {i: labels[i] for i in range(len(labels))} graph = nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from(", "in one part and factors in other graph. 
Edge denotes that factor depends", "range(self.num_variables)] def __getitem__(self, idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor:", "model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return self def __len__(self): return self.num_variables @abc.abstractmethod", "2.0 - see LICENSE file. from __future__ import annotations import abc import itertools", "return [FunctionFactor(self, [i], lambda x: x[0]) for i in range(self.num_variables)] def get_factor_graph(self) ->", "for factor_id in range(len(factors)): for var_id in factors[factor_id].var_idx: graph.add_edge(var_id, self.num_variables + factor_id) return", "-> Variable: \"\"\"Returns variable by its index.\"\"\" if not 0 <= idx <", "each of them representing a factor on one variable with identity function. They", "**kwargs) -> np.ndarray: \"\"\"Finds the most probable state.\"\"\" def sample(self, num_samples: int, algorithm='auto',", "\"\"\" factors = list(self.get_factors()) var_labels = [v.name for v in self.get_variables()] fact_labels =", "def __init__(self, num_variables: int, domain: Domain): \"\"\" :param num_variables: Number of variables in", "under the Apache License, Version 2.0 - see LICENSE file. from __future__ import", "if prob >= best_prob: best_state = x best_prob = prob return best_state def", "Factor graph is a bipartite graph with variables in one part and factors", "Default domain of each variable. \"\"\" self.num_variables = num_variables self._default_domain = domain self._vars", "samples.\"\"\" @abc.abstractmethod def get_factors(self) -> Iterable[Factor]: \"\"\"Returns all factors.\"\"\" def get_symbolic_variables(self) -> List[FunctionFactor]:", "expressions. Returns lists of trivial ``FunctionFactor`` s, each of them representing a factor", "domain: Domain): \"\"\" :param num_variables: Number of variables in the model. :param domain:", "graph. Edge denotes that factor depends on variable. 
\"\"\" factors = list(self.get_factors()) var_labels", "Dict[int, str]]: \"\"\"Builds factor graph for the model. Factor graph is a bipartite", "variable by its index.\"\"\" if not 0 <= idx < self.num_variables: raise IndexError(", "%d\" % ( idx, self.num_variables)) if idx not in self._vars: v = Variable(self,", "sample(self, num_samples: int, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Generates samples.\"\"\" @abc.abstractmethod def get_factors(self) ->", "get_variables(self) -> List[Variable]: \"\"\"Returns all variables.\"\"\" return [self.get_variable(i) for i in range(self.num_variables)] def", "__len__(self): return self.num_variables @abc.abstractmethod def infer(self, algorithm='auto', **kwargs): \"\"\"Performs inference.\"\"\" @abc.abstractmethod def max_likelihood(self,", "v in self.get_variables()] fact_labels = [f.get_name() for f in factors] labels = var_labels", "depends on variable. \"\"\" factors = list(self.get_factors()) var_labels = [v.name for v in", "\"\"\"Adds a factor to the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return self", "in factors] labels = var_labels + fact_labels labels = {i: labels[i] for i", "num_variables: Number of variables in the model. :param domain: Default domain of each", "itertools.product( *(v.domain.values for v in model.get_variables())): prob = model.evaluate(np.array(x)) if prob >= best_prob:", "other graph. Edge denotes that factor depends on variable. \"\"\" factors = list(self.get_factors())", "from __future__ import annotations import abc import itertools from typing import TYPE_CHECKING, Iterable,", "pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw", "pdf in point. 
In other words, just substitutes values into factors and multiplies", "its index.\"\"\" if not 0 <= idx < self.num_variables: raise IndexError( \"index %d", "to the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other) return self def __len__(self): return", "in point. In other words, just substitutes values into factors and multiplies them.", "import networkx as nx import numpy as np from inferlo.base.factors import FunctionFactor from", "another ``FunctionFactor``. \"\"\" return [FunctionFactor(self, [i], lambda x: x[0]) for i in range(self.num_variables)]", "the biggest domain size over all variables.\"\"\" return max([var.domain.size() for var in self.get_variables()])", "IndexError( \"index %d is out of bounds for random vector of size %d\"", "in range(self.num_variables)] def __getitem__(self, idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self,", "len(nx.bipartite.sets(graph)[1]) pos = nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') #", "x in itertools.product( *(v.domain.values for v in model.get_variables())): part_func += model.evaluate(np.array(x)) return part_func", "Variable: return self.get_variable(idx) @abc.abstractmethod def add_factor(self, factor: Factor): \"\"\"Adds a factor to the", "them representing a factor on one variable with identity function. They can be", "just substitutes values into factors and multiplies them. \"\"\" x = np.array(x) assert", "not 0 <= idx < self.num_variables: raise IndexError( \"index %d is out of", "for f in factors] labels = var_labels + fact_labels labels = {i: labels[i]", "factor on one variable with identity function. 
They can be used in mathematical", "for i in range(self.num_variables)] def get_factor_graph(self) -> Tuple[nx.Graph, Dict[int, str]]: \"\"\"Builds factor graph", "+ factor_id) return graph, labels def draw_factor_graph(self, ax): \"\"\"Draws the factor graph.\"\"\" graph,", "(c) 2020, The InferLO authors. All rights reserved. # Licensed under the Apache", "bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id in range(len(factors)): for var_id", "other words, just substitutes values into factors and multiplies them. \"\"\" x =", "factor.value(x[factor.var_idx]) return result def part_func_bruteforce(model): \"\"\"Evaluates partition function in very inefficient way.\"\"\" part_func", "graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id in range(len(factors)): for", "factor: Factor): \"\"\"Adds a factor to the model.\"\"\" def __imul__(self, other: Factor): self.add_factor(other)", "+ fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray) -> float: \"\"\"Returns value", "self.num_variables: raise IndexError( \"index %d is out of bounds for random vector of", "from inferlo.base.variable import Variable if TYPE_CHECKING: from inferlo.base import Domain, Factor class GraphModel(abc.ABC):", "of them representing a factor on one variable with identity function. They can", "of non-normalized pdf in point. In other words, just substitutes values into factors", "out of bounds for random vector of size %d\" % ( idx, self.num_variables))", "ax): \"\"\"Draws the factor graph.\"\"\" graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc", "import abc import itertools from typing import TYPE_CHECKING, Iterable, Tuple, Dict, List import", "Number of variables in the model. 
:param domain: Default domain of each variable.", "int, domain: Domain): \"\"\" :param num_variables: Number of variables in the model. :param", "variables for usage in expressions. Returns lists of trivial ``FunctionFactor`` s, each of", "factor depends on variable. \"\"\" factors = list(self.get_factors()) var_labels = [v.name for v", "np.ndarray) -> float: \"\"\"Returns value of non-normalized pdf in point. In other words,", "nx.bipartite_layout(graph, top) nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in", "i in range(self.num_variables)] def __getitem__(self, idx: int) -> Variable: return self.get_variable(idx) @abc.abstractmethod def", "# Draw factors in another color. nx.draw_networkx(graph, pos, ax, labels=labels, nodelist=list(range(vc, vc +", "ax, labels=labels, nodelist=list(range(vc, vc + fc)), node_shape='s', edgelist=[], node_color='lightgreen') def evaluate(self, x: np.ndarray)", "model.\"\"\" def __init__(self, num_variables: int, domain: Domain): \"\"\" :param num_variables: Number of variables", "variables in one part and factors in other graph. Edge denotes that factor", "model.evaluate(np.array(x)) return part_func def max_likelihood_bruteforce(model): \"\"\"Evaluates most likely state in a very inefficient", "nx.Graph() graph.add_nodes_from(range(self.num_variables), bipartite=0) graph.add_nodes_from( range(self.num_variables, self.num_variables + len(factors)), bipartite=1) for factor_id in range(len(factors)):", "@abc.abstractmethod def max_likelihood(self, algorithm='auto', **kwargs) -> np.ndarray: \"\"\"Finds the most probable state.\"\"\" def", "graph.\"\"\" graph, labels = self.get_factor_graph() top = nx.bipartite.sets(graph)[0] vc = self.num_variables fc =", "nx.draw_networkx(graph, pos, ax, labels=labels, node_shape='o', nodelist=list(range(vc)), node_color='#ffaaaa') # Draw factors in another color." ]
[ "// 2 image.anchor_y = image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\")", "points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] - point_2[1]) ** 2)", "import math from pyglet.window import key score = 0 game_window = pyglet.window.Window() pyglet.resource.path", "*args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids = [] for i", "[] for i in range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position)", "= random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 - 50", "for obj in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw()", "self.velocity_y * dt def collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance =", "between two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] - point_2[1])", "key score = 0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img =", "image.anchor_x = image.width // 2 image.anchor_y = image.height // 2 player_image = pyglet.resource.image(\"ship.png\")", "= 0 self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers): if symbol == key.UP:", "self.velocity_x = 0 self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers): if symbol ==", "**kwargs) self.keys = dict(left=False, right=False, up=False, down = False) self.rotate_speed = 200.0 self.velocity_x", "y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100", "for to_remove in [obj for obj in game_objects if obj.dead]: 
to_remove.delete() game_objects.remove(to_remove) @game_window.event", "50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an image's", "self.dead = True global score score += 1 class Asteroid(PhysicalObject): def __init__(self, *args,", "self.y -= 100 * dt player = Player(img=player_image, x=400, y=300) game_objects = asteroids", "new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def", "import key score = 0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img", "360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation =", "collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance <= collision_distance)", "level_label.draw() score_label.draw() player.draw() for asteroid in asteroids: if not asteroid.dead: asteroid.draw() game_window.push_handlers(player) pyglet.clock.schedule_interval(update,", "player = Player(img=player_image, x=400, y=300) game_objects = asteroids + [player] def update(dt): for", "= game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b in baloons: if not b.dead", "for i in range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position) <", "800) asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0,", "global score score += 1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args,", "100 * dt if self.keys['right']: self.x += 100 * dt if self.keys['up']: self.y", "asteroid_y), player_position) < 10: asteroid_x = random.randint(0, 800) 
asteroid_y = random.randint(0, 600) new_asteroid", "score score += 1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs)", "x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position)", "asteroids + [player] def update(dt): for obj in game_objects: obj.update(dt) player = game_objects[-1]", "symbol == key.DOWN: self.keys['down'] = False def update(self, dt): super(Player, self).update(dt) if self.keys['left']:", "= dict(left=False, right=False, up=False, down = False) self.rotate_speed = 200.0 self.velocity_x = 0", "self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self, dt):", "right=False, up=False, down = False) self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y =", "b in baloons: if not b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj", "dt def collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position)", "Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y", "class Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False,", "if not b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for obj in", "def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids = []", "@game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in asteroids: if not", 
"pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300)", "\"Score: {}\".format(score) for b in baloons: if not b.dead and b.collides_with(player): b.handle_collision_with(player) for", "self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers):", "obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in", "asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360)", "new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an image's anchor", "= image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\")", "*args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False, down = False) self.rotate_speed", "while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x = random.randint(0, 800) asteroid_y = random.randint(0,", "asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys", "0.0, 0.0 def update(self, dt): self.x += self.velocity_x * dt self.y += self.velocity_y", "in [obj for obj in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw():", "(point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args,", "player_ship.position) class 
Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False,", "an image's anchor point to its center\"\"\" image.anchor_x = image.width // 2 image.anchor_y", "for obj in game_objects: obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1] score_label.text =", "== key.UP: self.keys['up'] = False elif symbol == key.LEFT: self.keys['left'] = False elif", "+ [player] def update(dt): for obj in game_objects: obj.update(dt) player = game_objects[-1] baloons", "**kwargs) def asteroids(num_asteroids, player_position): asteroids = [] for i in range(num_asteroids): asteroid_x, asteroid_y", "key.UP: self.keys['up'] = False elif symbol == key.LEFT: self.keys['left'] = False elif symbol", "resources import random import math from pyglet.window import key score = 0 game_window", "360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an image's anchor point to its", "dt if self.keys['right']: self.x += 100 * dt if self.keys['up']: self.y += 100", "ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance between two", "player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self,", "self.x -= 100 * dt if self.keys['right']: self.x += 100 * dt if", "symbol == key.RIGHT: self.keys['right'] = False elif symbol == key.DOWN: self.keys['down'] = False", "player = game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b in", "asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an image's anchor point to its center\"\"\"", "= [] for i in range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y),", "math from pyglet.window import key score = 0 game_window = 
pyglet.window.Window() pyglet.resource.path =", "super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False, down = False) self.rotate_speed = 200.0", "self.keys['right']: self.x += 100 * dt if self.keys['up']: self.y += 100 * dt", "dt player = Player(img=player_image, x=400, y=300) game_objects = asteroids + [player] def update(dt):", "self.velocity_x * dt self.y += self.velocity_y * dt def collides_with(self, other_object): collision_distance =", "center_image(image): \"\"\"Sets an image's anchor point to its center\"\"\" image.anchor_x = image.width //", "symbol == key.UP: self.keys['up'] = False elif symbol == key.LEFT: self.keys['left'] = False", "game_objects = asteroids + [player] def update(dt): for obj in game_objects: obj.update(dt) player", "pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\",", "random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid)", "b.handle_collision_with(player) for to_remove in [obj for obj in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove)", "**kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids = [] for i in", "key.LEFT: self.keys['left'] = False elif symbol == key.RIGHT: self.keys['right'] = False elif symbol", "asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster", "-= 100 * dt player = Player(img=player_image, x=400, y=300) game_objects = asteroids +", "update(self, dt): self.x += self.velocity_x * dt self.y += self.velocity_y * dt def", "{}\".format(score) for b in baloons: if not b.dead and b.collides_with(player): 
b.handle_collision_with(player) for to_remove", "return asteroids def center_image(image): \"\"\"Sets an image's anchor point to its center\"\"\" image.anchor_x", "= asteroids + [player] def update(dt): for obj in game_objects: obj.update(dt) player =", "design import pyglet import resources import random import math from pyglet.window import key", "def update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x -= 100 * dt if", "dt if self.keys['up']: self.y += 100 * dt if self.keys['down']: self.y -= 100", "game_objects: obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for", "self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self,", "self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids = [] for i in range(num_asteroids): asteroid_x,", "obj in game_objects: obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score:", "to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in asteroids:", "= pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance between two points\"\"\"", "center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def", "in baloons: if not b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for", "distance between two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -", "x=400, y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args, **kwargs): 
super(Player,self).__init__(*args,", "False) self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x def on_key_press(self, symbol,", "if symbol == key.UP: self.keys['up'] = True elif symbol == key.LEFT: self.keys['left'] =", "= pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10,", "// 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label =", "up=False, down = False) self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x", "= random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360)", "player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score),", "elif symbol == key.RIGHT: self.keys['right'] = False elif symbol == key.DOWN: self.keys['down'] =", "\"\"\"Sets an image's anchor point to its center\"\"\" image.anchor_x = image.width // 2", "self.keys['left']: self.x -= 100 * dt if self.keys['right']: self.x += 100 * dt", "2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead = False", "Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20,", "on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in asteroids: if not asteroid.dead: asteroid.draw()", "= True elif symbol == key.RIGHT: self.keys['right'] = True elif symbol == key.DOWN:", 
"pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon", "game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in asteroids: if not asteroid.dead: asteroid.draw() game_window.push_handlers(player)", "player_position): asteroids = [] for i in range(num_asteroids): asteroid_x, asteroid_y = player_position while", "PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y", "distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance between two points\"\"\" return math.sqrt((point_1[0] -", "pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns", "symbol == key.RIGHT: self.keys['right'] = True elif symbol == key.DOWN: self.keys['down'] = True", "center\"\"\" image.anchor_x = image.width // 2 image.anchor_y = image.height // 2 player_image =", "== key.DOWN: self.keys['down'] = True def on_key_release(self, symbol, modifiers): if symbol == key.UP:", "key.RIGHT: self.keys['right'] = False elif symbol == key.DOWN: self.keys['down'] = False def update(self,", "import random import math from pyglet.window import key score = 0 game_window =", "__init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y = 0.0,", "- 50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return", "self.keys['up']: self.y += 100 * dt if self.keys['down']: self.y -= 100 * dt", "class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, 
**kwargs) self.dead = False self.velocity_x,", "new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 -", "other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance <=", "+= self.velocity_y * dt def collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance", "2 image.anchor_y = image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image", "0 self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers): if symbol == key.UP: self.keys['up']", "Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False, down", "self.keys['up'] = False elif symbol == key.LEFT: self.keys['left'] = False elif symbol ==", "== key.LEFT: self.keys['left'] = False elif symbol == key.RIGHT: self.keys['right'] = False elif", "b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for obj in game_objects if", "point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead", "if self.keys['down']: self.y -= 100 * dt player = Player(img=player_image, x=400, y=300) game_objects", "not b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for obj in game_objects", "self.velocity_x def on_key_press(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = True elif", "dict(left=False, right=False, up=False, down = False) self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y", "[player] def update(dt): for obj in game_objects: obj.update(dt) player = 
game_objects[-1] baloons =", "self.keys = dict(left=False, right=False, up=False, down = False) self.rotate_speed = 200.0 self.velocity_x =", "= asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys =", "= True def on_key_release(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = False", "baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b in baloons: if not", "import resources import random import math from pyglet.window import key score = 0", "dt): super(Player, self).update(dt) if self.keys['left']: self.x -= 100 * dt if self.keys['right']: self.x", "-= 100 * dt if self.keys['right']: self.x += 100 * dt if self.keys['up']:", "asteroid_x = random.randint(0, 800) asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y)", "* dt self.y += self.velocity_y * dt def collides_with(self, other_object): collision_distance = self.image.width/2", "in game_objects: obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score)", "= False self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self, dt): self.x += self.velocity_x", "False elif symbol == key.DOWN: self.keys['down'] = False def update(self, dt): super(Player, self).update(dt)", "on_key_press(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = True elif symbol ==", "b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for obj in game_objects if obj.dead]: to_remove.delete()", "def update(self, dt): self.x += self.velocity_x * dt self.y += self.velocity_y * dt", "game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0),", "0.0 def update(self, dt): self.x += self.velocity_x * dt self.y += self.velocity_y *", 
"(actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead = True global score score +=", "collision_distance) def handle_collision_with(self, other_object): self.dead = True global score score += 1 class", "**kwargs) self.dead = False self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self, dt): self.x", "down = False) self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x def", "def on_key_release(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = False elif symbol", "== key.DOWN: self.keys['down'] = False def update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x", "= 0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def", "level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image,", "y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs)", "200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers): if symbol", "key.RIGHT: self.keys['right'] = True elif symbol == key.DOWN: self.keys['down'] = True def on_key_release(self,", "self.keys['down']: self.y -= 100 * dt player = Player(img=player_image, x=400, y=300) game_objects =", "== key.LEFT: self.keys['left'] = True elif symbol == key.RIGHT: self.keys['right'] = True elif", "True elif symbol == key.DOWN: self.keys['down'] = True def on_key_release(self, symbol, modifiers): if", "* dt if self.keys['right']: self.x += 100 * dt if self.keys['up']: self.y +=", "pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), 
point_2=(0, 0)):", "True elif symbol == key.LEFT: self.keys['left'] = True elif symbol == key.RIGHT: self.keys['right']", "< 10: asteroid_x = random.randint(0, 800) asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img,", "x=400, y=300) game_objects = asteroids + [player] def update(dt): for obj in game_objects:", "= True elif symbol == key.DOWN: self.keys['down'] = True def on_key_release(self, symbol, modifiers):", "* dt def collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position,", "= pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args,", "collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance", "self.x += self.velocity_x * dt self.y += self.velocity_y * dt def collides_with(self, other_object):", "self.keys['left'] = False elif symbol == key.RIGHT: self.keys['right'] = False elif symbol ==", "elif symbol == key.LEFT: self.keys['left'] = False elif symbol == key.RIGHT: self.keys['right'] =", "image.anchor_y = image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image =", "in range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x", "= pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460,", "other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead = True global score", "if self.keys['up']: self.y += 100 * dt if self.keys['down']: self.y -= 100 *", "player.draw() for asteroid in 
asteroids: if not asteroid.dead: asteroid.draw() game_window.push_handlers(player) pyglet.clock.schedule_interval(update, 1/120.0) pyglet.app.run()", "if symbol == key.UP: self.keys['up'] = False elif symbol == key.LEFT: self.keys['left'] =", "<= collision_distance) def handle_collision_with(self, other_object): self.dead = True global score score += 1", "pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args, **kwargs):", "game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in asteroids: if", "[obj for obj in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear()", "player_position while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x = random.randint(0, 800) asteroid_y =", "need better design import pyglet import resources import random import math from pyglet.window", "other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead", "= random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x", "game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for", "return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] - point_2[1]) ** 2) class", "def asteroids(num_asteroids, player_position): asteroids = [] for i in range(num_asteroids): asteroid_x, asteroid_y =", "= 200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers): if", "random import math from pyglet.window import 
key score = 0 game_window = pyglet.window.Window()", "if self.keys['left']: self.x -= 100 * dt if self.keys['right']: self.x += 100 *", "random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x =", "self.keys['right'] = False elif symbol == key.DOWN: self.keys['down'] = False def update(self, dt):", "bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label", "= self.velocity_x def on_key_press(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = True", "elif symbol == key.RIGHT: self.keys['right'] = True elif symbol == key.DOWN: self.keys['down'] =", "symbol, modifiers): if symbol == key.UP: self.keys['up'] = True elif symbol == key.LEFT:", "y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position) class", "0), point_2=(0, 0)): \"\"\"Returns the distance between two points\"\"\" return math.sqrt((point_1[0] - point_2[0])", "super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids = [] for i in range(num_asteroids):", "2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score:", "distance(self.position, other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead = True global", "= random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image):", "== key.RIGHT: self.keys['right'] = True elif symbol == key.DOWN: self.keys['down'] = True def", "+= self.velocity_x * dt 
self.y += self.velocity_y * dt def collides_with(self, other_object): collision_distance", "symbol == key.UP: self.keys['up'] = True elif symbol == key.LEFT: self.keys['left'] = True", "def center_image(image): \"\"\"Sets an image's anchor point to its center\"\"\" image.anchor_x = image.width", "= True elif symbol == key.LEFT: self.keys['left'] = True elif symbol == key.RIGHT:", "score_label.text = \"Score: {}\".format(score) for b in baloons: if not b.dead and b.collides_with(player):", "self).update(dt) if self.keys['left']: self.x -= 100 * dt if self.keys['right']: self.x += 100", "if self.keys['right']: self.x += 100 * dt if self.keys['up']: self.y += 100 *", "random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets", "{}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image)", "+ other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self, other_object):", "new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 -", "- 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an", "image.width // 2 image.anchor_y = image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image =", "asteroids(num_asteroids, player_position): asteroids = [] for i in range(num_asteroids): asteroid_x, asteroid_y = player_position", "update(dt): for obj in game_objects: obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1] score_label.text", "score += 1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, 
**kwargs) def", "__init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids = [] for", "\"\"\"Returns the distance between two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2 +", "= game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b in baloons:", "def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance between two points\"\"\" return math.sqrt((point_1[0]", "dt): self.x += self.velocity_x * dt self.y += self.velocity_y * dt def collides_with(self,", "= distance(self.position, other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead = True", "Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids =", "pyglet.window import key score = 0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex()", "math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite):", "class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids", "point to its center\"\"\" image.anchor_x = image.width // 2 image.anchor_y = image.height //", "2 + (point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs):", "to its center\"\"\" image.anchor_x = image.width // 2 image.anchor_y = image.height // 2", "__init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False, down = False)", "asteroids def center_image(image): \"\"\"Sets an image's anchor point to its center\"\"\" image.anchor_x =", "pyglet.resource.reindex() ast_img = 
pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance between", "asteroids(20, player_ship.position) class Player(PhysicalObject): def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False,", "other_object): self.dead = True global score score += 1 class Asteroid(PhysicalObject): def __init__(self,", "Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position): asteroids =", "600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100", "handle_collision_with(self, other_object): self.dead = True global score score += 1 class Asteroid(PhysicalObject): def", "False elif symbol == key.RIGHT: self.keys['right'] = False elif symbol == key.DOWN: self.keys['down']", "game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b in baloons: if not b.dead and", "self.velocity_y = self.velocity_x def on_key_press(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] =", "obj in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw()", "self.dead = False self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self, dt): self.x +=", "random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an image's anchor point to", "i in range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position) < 10:", "= player_position while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x = random.randint(0, 800) asteroid_y", "image's anchor point to its center\"\"\" image.anchor_x = image.width // 2 image.anchor_y =", "super(Player, 
self).update(dt) if self.keys['left']: self.x -= 100 * dt if self.keys['right']: self.x +=", "1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids, player_position):", "= ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the", "elif symbol == key.LEFT: self.keys['left'] = True elif symbol == key.RIGHT: self.keys['right'] =", "self.keys['right'] = True elif symbol == key.DOWN: self.keys['down'] = True def on_key_release(self, symbol,", "*args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y = 0.0, 0.0", "pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance between two points\"\"\" return", "key.DOWN: self.keys['down'] = False def update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x -=", "point_2[0]) ** 2 + (point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self,", "score = 0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\")", "** 2 + (point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args,", "in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw()", "super(PhysicalObject, self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self,", "Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids", "pyglet import resources import random import math from 
pyglet.window import key score =", "dt self.y += self.velocity_y * dt def collides_with(self, other_object): collision_distance = self.image.width/2 +", "- point_2[0]) ** 2 + (point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def", "self.keys['down'] = True def on_key_release(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] =", "= False def update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x -= 100 *", "= random.randint(0, 800) asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation", "0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0,", "x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y =", "to_remove in [obj for obj in game_objects if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def", "= Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation = random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50", "Player(img=player_image, x=400, y=300) game_objects = asteroids + [player] def update(dt): for obj in", "player_position) < 10: asteroid_x = random.randint(0, 800) asteroid_y = random.randint(0, 600) new_asteroid =", "self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self, dt): self.x += self.velocity_x * dt", "* dt if self.keys['down']: self.y -= 100 * dt player = Player(img=player_image, x=400,", "= Player(img=player_image, x=400, y=300) game_objects = asteroids + [player] def update(dt): for obj", "two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] - point_2[1]) **", "elif symbol == key.DOWN: self.keys['down'] = True def on_key_release(self, symbol, modifiers): if symbol", "asteroids = [] for i in 
range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x,", "range(num_asteroids): asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x =", "new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation = random.randint(0,", "* dt player = Player(img=player_image, x=400, y=300) game_objects = asteroids + [player] def", "0)): \"\"\"Returns the distance between two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2", "= self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return (actual_distance <= collision_distance) def", "the distance between two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1]", "its center\"\"\" image.anchor_x = image.width // 2 image.anchor_y = image.height // 2 player_image", "10: asteroid_x = random.randint(0, 800) asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x,", "def __init__(self, *args, **kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False, down =", "modifiers): if symbol == key.UP: self.keys['up'] = False elif symbol == key.LEFT: self.keys['left']", "False elif symbol == key.LEFT: self.keys['left'] = False elif symbol == key.RIGHT: self.keys['right']", "self.keys['down'] = False def update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x -= 100", "and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for obj in game_objects if obj.dead]:", "def handle_collision_with(self, other_object): self.dead = True global score score += 1 class Asteroid(PhysicalObject):", "score_label.draw() player.draw() for asteroid in asteroids: if not asteroid.dead: asteroid.draw() game_window.push_handlers(player) pyglet.clock.schedule_interval(update, 1/120.0)", "= 
pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label =", "elif symbol == key.DOWN: self.keys['down'] = False def update(self, dt): super(Player, self).update(dt) if", "def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid in asteroids: if not asteroid.dead:", "= pyglet.window.Window() pyglet.resource.path = ['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0,", "anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300) asteroids = asteroids(20, player_ship.position) class Player(PhysicalObject):", "== key.RIGHT: self.keys['right'] = False elif symbol == key.DOWN: self.keys['down'] = False def", "import pyglet import resources import random import math from pyglet.window import key score", "= image.width // 2 image.anchor_y = image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image", "self.x += 100 * dt if self.keys['up']: self.y += 100 * dt if", "better design import pyglet import resources import random import math from pyglet.window import", "- point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs)", "modifiers): if symbol == key.UP: self.keys['up'] = True elif symbol == key.LEFT: self.keys['left']", "actual_distance = distance(self.position, other_object.position) return (actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead =", "= random.randint(0, 360) asteroids.append(new_asteroid) return asteroids def center_image(image): \"\"\"Sets an image's anchor point", "key.LEFT: self.keys['left'] = True elif symbol == key.RIGHT: self.keys['right'] = True elif symbol", "**kwargs): super(PhysicalObject, 
self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y = 0.0, 0.0 def", "self.y += self.velocity_y * dt def collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2", "True elif symbol == key.RIGHT: self.keys['right'] = True elif symbol == key.DOWN: self.keys['down']", "== key.UP: self.keys['up'] = True elif symbol == key.LEFT: self.keys['left'] = True elif", "return (actual_distance <= collision_distance) def handle_collision_with(self, other_object): self.dead = True global score score", "random.randint(0, 800) asteroid_y = random.randint(0, 600) new_asteroid = Asteroid(img=ast_img, x=asteroid_x, y=asteroid_y) new_asteroid.rotation =", "update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x -= 100 * dt if self.keys['right']:", "False self.velocity_x, self.velocity_y = 0.0, 0.0 def update(self, dt): self.x += self.velocity_x *", "symbol == key.DOWN: self.keys['down'] = True def on_key_release(self, symbol, modifiers): if symbol ==", "key.DOWN: self.keys['down'] = True def on_key_release(self, symbol, modifiers): if symbol == key.UP: self.keys['up']", "on_key_release(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = False elif symbol ==", "asteroid_x, asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x = random.randint(0,", "* dt if self.keys['up']: self.y += 100 * dt if self.keys['down']: self.y -=", "= False) self.rotate_speed = 200.0 self.velocity_x = 0 self.velocity_y = self.velocity_x def on_key_press(self,", "self.velocity_y = 0.0, 0.0 def update(self, dt): self.x += self.velocity_x * dt self.y", "y=300) game_objects = asteroids + [player] def update(dt): for obj in game_objects: obj.update(dt)", "obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b", "+= 100 * dt if self.keys['up']: self.y += 100 * dt if self.keys['down']:", 
"point_2=(0, 0)): \"\"\"Returns the distance between two points\"\"\" return math.sqrt((point_1[0] - point_2[0]) **", "= pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship = pyglet.sprite.Sprite(img=player_image, x=400,", "random.randint(0, 360) new_asteroid.velocity_x = random.random()*100 - 50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation", "= \"Score: {}\".format(score) for b in baloons: if not b.dead and b.collides_with(player): b.handle_collision_with(player)", "False def update(self, dt): super(Player, self).update(dt) if self.keys['left']: self.x -= 100 * dt", "game_objects[-1] baloons = game_objects[0:-1] score_label.text = \"Score: {}\".format(score) for b in baloons: if", "pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460)", "anchor point to its center\"\"\" image.anchor_x = image.width // 2 image.anchor_y = image.height", "for b in baloons: if not b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in", "**kwargs): super(Player,self).__init__(*args, **kwargs) self.keys = dict(left=False, right=False, up=False, down = False) self.rotate_speed =", "+= 1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): super(Asteroid, self).__init__(*args, **kwargs) def asteroids(num_asteroids,", "50 new_asteroid.velocity_y = random.random()*100 - 50 new_asteroid.rotation = random.randint(0, 360) asteroids.append(new_asteroid) return asteroids", "100 * dt if self.keys['up']: self.y += 100 * dt if self.keys['down']: self.y", "+ (point_1[1] - point_2[1]) ** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject,", "y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", 
x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship =", "= False elif symbol == key.LEFT: self.keys['left'] = False elif symbol == key.RIGHT:", "= pyglet.resource.image(\"player.png\") score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace", "asteroid_y = player_position while distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x = random.randint(0, 800)", "dt if self.keys['down']: self.y -= 100 * dt player = Player(img=player_image, x=400, y=300)", "from pyglet.window import key score = 0 game_window = pyglet.window.Window() pyglet.resource.path = ['./resources']", "image.height // 2 player_image = pyglet.resource.image(\"ship.png\") bullet_image = pyglet.resource.image(\"player.png\") asteroid_image = pyglet.resource.image(\"player.png\") score_label", "= False elif symbol == key.RIGHT: self.keys['right'] = False elif symbol == key.DOWN:", "['./resources'] pyglet.resource.reindex() ast_img = pyglet.resource.image(\"player.png\") def distance(point_1=(0, 0), point_2=(0, 0)): \"\"\"Returns the distance", "def update(dt): for obj in game_objects: obj.update(dt) player = game_objects[-1] baloons = game_objects[0:-1]", "100 * dt player = Player(img=player_image, x=400, y=300) game_objects = asteroids + [player]", "def on_key_press(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = True elif symbol", "True def on_key_release(self, symbol, modifiers): if symbol == key.UP: self.keys['up'] = False elif", "** 2) class PhysicalObject(pyglet.sprite.Sprite): def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead =", "key.UP: self.keys['up'] = True elif symbol == key.LEFT: self.keys['left'] = True elif symbol", "# need better design import pyglet import resources import random import math from", "symbol == key.LEFT: self.keys['left'] = True elif symbol == key.RIGHT: self.keys['right'] 
= True", "symbol, modifiers): if symbol == key.UP: self.keys['up'] = False elif symbol == key.LEFT:", "+= 100 * dt if self.keys['down']: self.y -= 100 * dt player =", "if obj.dead]: to_remove.delete() game_objects.remove(to_remove) @game_window.event def on_draw(): game_window.clear() level_label.draw() score_label.draw() player.draw() for asteroid", "self.keys['left'] = True elif symbol == key.RIGHT: self.keys['right'] = True elif symbol ==", "= True global score score += 1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs):", "self.keys['up'] = True elif symbol == key.LEFT: self.keys['left'] = True elif symbol ==", "symbol == key.LEFT: self.keys['left'] = False elif symbol == key.RIGHT: self.keys['right'] = False", "def collides_with(self, other_object): collision_distance = self.image.width/2 + other_object.image.width/2 actual_distance = distance(self.position, other_object.position) return", "x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center') center_image(player_image) player_ship", "score_label = pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2,", "= 0.0, 0.0 def update(self, dt): self.x += self.velocity_x * dt self.y +=", "def __init__(self, *args, **kwargs): super(PhysicalObject, self).__init__(*args, **kwargs) self.dead = False self.velocity_x, self.velocity_y =", "pyglet.text.Label(text=\"Score: {}\".format(score), x=10, y=460) level_label = pyglet.text.Label(text=\"Balloon Blaster Mace Ball\", x=game_window.width//2, y=460, anchor_x='center')", "self.y += 100 * dt if self.keys['down']: self.y -= 100 * dt player", "distance((asteroid_x, asteroid_y), player_position) < 10: asteroid_x = random.randint(0, 800) asteroid_y = random.randint(0, 600)", "True global score score += 1 class Asteroid(PhysicalObject): def __init__(self, *args, **kwargs): 
super(Asteroid,", "100 * dt if self.keys['down']: self.y -= 100 * dt player = Player(img=player_image,", "= False elif symbol == key.DOWN: self.keys['down'] = False def update(self, dt): super(Player,", "baloons: if not b.dead and b.collides_with(player): b.handle_collision_with(player) for to_remove in [obj for obj" ]
[ "If previous entries exist, replace the ones displayed # (up to 4 latest)", "= tk.Label( top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label =", "for active or sold shares. After viewing or editing you can save changes", "= manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and", "\"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy()", "top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390)", "= tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per", "pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are", "shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window,", "padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save)", "text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10)", "entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share)", "tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", 
command=lambda: edit(parent, share,", "# check if all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get()", "shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT)", "date:\", \"Divident:\", \"Divident date:\", \"Exit price (per share):\", \"Exit dete:\"] for txt in", "tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4,", "entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } # cost depends on changing", "= str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and save in order all user input", "(per share):\", \"Exit dete:\"] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT)", "tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4,", "= tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5,", "date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window,", "old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2,", "width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2,", "total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # 
Collect and save", "column=1, padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window,", "bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\",", "ones (in case edits were made) else: list_of_entries = (div_1, div_2, div_3, div_4,", "old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date =", "pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window,", "correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and", "shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\",", "exit_date.get(), \"Cost\": '' } # cost depends on changing current share price but", "label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity =", "entries exist, replace the ones displayed # (up to 4 latest) with the", "(up to 4 latest) with the new ones (in case edits were made)", "last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display = 4 for counter in range(0, dividends_to_display):", "scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels = [\"Name:\",", "div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 = 
tk.Entry(top_window, width=9, font=FONT)", "choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure you want to", "div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 =", "column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window))", "= tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4,", "font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let me know when:\", font=FONT)", "font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button( top_window,", "share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and save in order all", "tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20)", "top_window, text=\"Price is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label =", "pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm to a share\"\"\" prof = share[1]", "\"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\":", "number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if all_dividents:", "font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1,", "label.grid(sticky=\"nw\", 
padx=5, pady=5) label = tk.Label( top_window, text=\"Price is lower or equal to:\",", "exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } # cost depends on changing current share", "column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent,", "= tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0,", "in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window,", "need to consider fixed selling price. # Therefore sold cost can be calculated", "tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3,", "on a closed or active share opens a menu windows with buttons allowing", "\"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if", "== 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4,", "padx=5, pady=5) label = tk.Label( top_window, text=\"Price is higher or equal to:\", font=FONT)", "tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\",", "you to edit/add alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button =", "menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button", "share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and save in order all user", "div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 
= tk.Entry(top_window, width=9, font=FONT)", "(per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price (per share):\", \"Exit dete:\"]", "tkinter as tk from tkinter import messagebox from Files import (shares_page, manage_db, calculate,", "text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm to", "menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0,", "share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and", "dividend_date entries. # If no previous inputs: if old_share[8] == \"\": list_of_entries =", "can be calculated and added to dictionary # without the need of calculating", "want to delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent)", "active or sold shares. 
After viewing or editing you can save changes \"\"\"", "padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, width=9,", "= ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >=", "unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure you want to delete this?\"):", "date:\", ] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") #", "else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 =", "old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9,", "div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends)", "add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(),", "a new # entry, this allows you to view and edit previous entries", "low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent)", "padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry =", "ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window, width=9,", "manage_db, calculate, scrap_web) FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows you to add", "in order all user input from dividend # and dividend_date entries. 
# If", "font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3", "for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of last for loop", "exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window,", "date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries =", "= (div_2, div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] =", "gives this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value)", "or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy()", "font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4", "you want to delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0])", "\"Delete\", \"Are you sure you want to delete this?\"): if to_delete[6] == \"\":", "and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or", "is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window,", "for counter in range(0, dividends_to_display): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first", "enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) 
share[\"Dividends\"] = '|'.join(all_dividents) # check", "tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT)", "share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\":", "price but if a # share was sold, you need to consider fixed", "= old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in range(0, number_of_dividends): for div_or_date, value", "padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 =", "to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is lower or", "entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries exist,", "= ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\"", "= tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds", "tk.Label( top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label(", "tk.Label( top_window, text=\"Price is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry", "as tk from tkinter import messagebox from Files import (shares_page, manage_db, calculate, scrap_web)", "top_window, text=\"Price is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry =", "tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, 
column=1,", "font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\"", "to consider fixed selling price. # Therefore sold cost can be calculated and", "import tkinter as tk from tkinter import messagebox from Files import (shares_page, manage_db,", "manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table(", "ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity =", "font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price", "= (div_1, div_2, div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5)", "sure you want to delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\",", "share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get() low = low_price_entry.get()", "share \"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT,", "def menu_window(parent, share): \"\"\"right clicking on a closed or active share opens a", "name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\",", "# ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window,", "share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get()", "= tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) 
div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5,", "or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is", "\"\"\"right clicking on a closed or active share opens a menu windows with", "need of calculating it all over again: if exit_price.get() != \"\": total_buying_price =", "for __ in range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents) > 0: all_dividents.remove(", "new ones (in case edits were made) else: list_of_entries = (div_1, div_2, div_3,", "font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking on a closed", "# share was sold, you need to consider fixed selling price. # Therefore", "of calculating it all over again: if exit_price.get() != \"\": total_buying_price = int(", "with the new ones (in case edits were made) else: list_of_entries = (div_1,", "(div_1, div_2, div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear", "check if all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() ==", "name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window,", "entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5)", "price (per share):\", \"Exit dete:\"] for txt in list_of_labels: label = tk.Label(top_window, text=txt,", "does have a dividend inputed, insert up to four last # dividend entries", "= {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\",", "and leave last (5th) 
field empty for a new # entry, this allows", "sold cost can be calculated and added to dictionary # without the need", "font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button", "value) else: list_of_entries = (div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends", "div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT)", "high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10,", "# cost depends on changing current share price but if a # share", "tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm", "= tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3,", "top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry", "width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share does have a dividend inputed,", "if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\")", "menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda:", "manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\",", "changing current share price but if a # share was sold, you need", "padx=5) exit_price.insert(0, 
old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7])", "column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button(", "tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT)", "to edit/add alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button(", "\"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price(", "value) else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5", "\"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent,", "date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5)", "div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT)", "column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button =", "old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 =", "= tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let", "delete_button = tk.Button( menu_window, 
text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2,", "# If no previous inputs: if old_share[8] == \"\": list_of_entries = (div_2, div_3,", "low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350,", "or exit_date.get() == ''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0])", "div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT)", "set_alarm(parent, share, choice_window): \"\"\"adds alarm to a share\"\"\" prof = share[1] alarm =", "div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if", "\"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure you want to delete", "dividend entries and leave last (5th) field empty for a new # entry,", "all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and", "0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5,", "[\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price", "high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6]", "close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def", "to_delete[0]) 
shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit window for active or", "padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5,", "scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) #", "= (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in", "entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window,", "of last for loop gives you dividend and # last gives this dividend's", "div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for", "font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm to a", "if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(),", "div_5.grid(row=4, column=5, padx=5) div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price =", "font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button(", "height=600, width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry", "number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first 
run of last for", "# LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", ]", "case edits were made) else: list_of_entries = (div_1, div_2, div_3, div_4, div_5) list_of_dates", "shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display", "width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5)", "(div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\") for __", "pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0,", "tkinter import messagebox from Files import (shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria", "calculating it all over again: if exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"])", "last for loop gives you dividend and # last gives this dividend's date", "div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents", "== \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label =", "4: dividends_to_display = 4 for counter in range(0, dividends_to_display): for div_or_date, value in", "padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def", "if old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes = ( 
div_date_4, div_date_3,", "= tk.Label( top_window, text=\"Price is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5)", "if old_share[8] == \"\": list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates = (div_date_2,", "to view and edit previous entries if old_share[8]: list_of_entries = (div_4, div_3, div_2,", "lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT)", "column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0,", "leave last (5th) field empty for a new # entry, this allows you", "made) else: list_of_entries = (div_1, div_2, div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2,", "tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1,", "you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share =", "alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\",", "the need of calculating it all over again: if exit_price.get() != \"\": total_buying_price", "high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy()", "save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(),", "a menu windows with buttons allowing you to edit/add alarm/delete share \"\"\" menu_window", "for counter, entrie in enumerate(list_of_entries): if entrie.get(): 
all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"]", "latest) with the new ones (in case edits were made) else: list_of_entries =", "= tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right", "run of last for loop gives you dividend and # last gives this", "add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1,", "equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1,", "share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in", "last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in range(0, number_of_dividends): for div_or_date,", "{\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\":", "consider fixed selling price. 
# Therefore sold cost can be calculated and added", "entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if all", "edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent,", "= tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9,", "str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and save in order all user input from", "column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0,", "save(): high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low)", "inputs: if old_share[8] == \"\": list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates =", "pady=5) label = tk.Label( top_window, text=\"Price is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\",", "padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1])", "quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5)", "\"Quantity:\", \"Entry price (per share):\", \"Entry date:\", ] for txt in list_of_labels: label", "cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent,", "= tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, 
column=2, pady=20,", "div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT)", "= share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get() low =", "manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit window", "column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0,", "input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get())", "dividend inputed, insert up to four last # dividend entries and leave last", "if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if entrie.get():", "with buttons allowing you to edit/add alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None, width=400,", "be calculated and added to dictionary # without the need of calculating it", "height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5)", "tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5,", "div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 =", "else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) 
choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit", "in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window,", "list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5)", "= tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT)", "gives you dividend and # last gives this dividend's date if div_or_date ==", "entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window,", "and # last gives this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value)", "you need to consider fixed selling price. # Therefore sold cost can be", "def save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\":", "\"Quantity:\", \"Entry price (per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price (per", "= tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9,", "exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame", "\"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price,", "\"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) 
shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window", "and save in order all user input from dividend # and dividend_date entries.", "def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure", "= tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete,", "this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\",", "alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share,", "div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\"", "command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\",", "quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT)", "width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window))", "share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries exist, replace", "padx=5) # if share does have a dividend inputed, insert up to four", "\"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) 
shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window =", "div_date_5) number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if", "know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is higher", "sold shares. After viewing or editing you can save changes \"\"\" def save():", "div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\") for __ in", "top_window = tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price", "width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10)", "name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels =", "share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] =", "command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\",", "<filename>Files/add_edit_delete_share.py import tkinter as tk from tkinter import messagebox from Files import (shares_page,", "top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let me know", "width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button(", "for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name", "share) manage_db.add_current_price( share[\"Name\"], 
scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent,", "share does have a dividend inputed, insert up to four last # dividend", "top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label(", "amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window,", "if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\",", "first run of last for loop gives you dividend and # last gives", "div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2,", "pady=5) amount_label = tk.Label( top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5)", "= len(last_dividends) for counter in range(0, number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")):", "= tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200,", "delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3,", "old_share[8] == \"\": list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3,", "share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\":", "counter in range(0, 
number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run", "div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of last for loop gives", "padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2])", "last_dividends[counter].split(\"-\")): # first run of last for loop gives you dividend and #", "# if share does have a dividend inputed, insert up to four last", "\"Entry date:\", ] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\")", "in range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for", "name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price =", "and edit previous entries if old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes", "enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous", "padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9,", "padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0,", "the ones displayed # (up to 4 latest) with the new ones (in", "share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels = [\"Name:\",", "padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9,", "== \"\": manage_db.add_share(\"gpw_shares\", share) 
manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy()", "= tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9,", "width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5)", "\"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } # cost depends on changing current", "delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT,", "= tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20,", "and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() ==", "selling price. # Therefore sold cost can be calculated and added to dictionary", "0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\"", "alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high)", "\"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600,", "order all user input from dividend # and dividend_date entries. 
# If no", "column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0,", "to delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else:", "tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share does have a dividend", "= tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4,", "= int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) #", "last # dividend entries and leave last (5th) field empty for a new", "if entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if", "dividends_to_display): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of last for", "four last # dividend entries and leave last (5th) field empty for a", "share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\":", "old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT,", "share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390)", "# LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry 
price (per share):\", \"Entry date:\", \"Divident:\",", "list_of_dates[counter].get()) # If previous entries exist, replace the ones displayed # (up to", "\"Entry price (per share):\", \"Entry date:\", ] for txt in list_of_labels: label =", "= tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent,", "tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete, choice_window):", "label = tk.Label( top_window, text=\"Price is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5,", "menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent,", "!= \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs(", "tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1,", "last gives this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0,", "or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2,", "} # cost depends on changing current share price but if a #", "\"SellingDate\": exit_date.get(), \"Cost\": '' } # cost depends on changing current share price", "= high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if", "tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, 
pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3,", "# ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity", "# Therefore sold cost can be calculated and added to dictionary # without", "\"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent,", "padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9,", "for counter in range(0, number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first", "closed or active share opens a menu windows with buttons allowing you to", "= tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5,", "width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\",", "4 latest) with the new ones (in case edits were made) else: list_of_entries", "(5th) field empty for a new # entry, this allows you to view", "= tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\",", "a share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high =", "= (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries exist, replace the", "dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 =", "command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\", 
font=FONT, command=top_window.destroy) close_button.grid(row=0,", "range(0, dividends_to_display): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of last", "all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if all input", "width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5)", "div_5) list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"]", "width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3,", "field empty for a new # entry, this allows you to view and", "all over again: if exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price =", "total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price))", "txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name =", "else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2,", "manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window", "''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\",", "4 for counter in range(0, dividends_to_display): for 
div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): #", "and dividend_date entries. # If no previous inputs: if old_share[8] == \"\": list_of_entries", "shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit window for active or sold", "to a share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high", "to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure you want", "share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent)", "for a new # entry, this allows you to view and edit previous", "edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT,", "\"\"\"display edit window for active or sold shares. 
After viewing or editing you", "= old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display = 4 for counter in", "column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window,", "new # entry, this allows you to view and edit previous entries if", "list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 = tk.Entry(top_window,", "font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4", "pady=5) label = tk.Label( top_window, text=\"Price is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\",", "= tk.Label( top_window, text=\"Price is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5)", "= tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5,", "last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display = 4 for counter", "len(last_dividends) for counter in range(0, number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): #", "padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button", "tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3,", ") def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you", "add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button(", "\"Exit dete:\"] for txt in 
list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") #", "(div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get())", "column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1", "div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if", "messagebox from Files import (shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria 12\" def", "== '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if old_share[6]", "if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share)", "column=4, padx=5) # if share does have a dividend inputed, insert up to", "help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm to a share\"\"\"", "padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9,", "all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\" +", "+ list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if all input correct if(manage_db.check_if_valid_name(name.get()) and", "to four last # dividend entries and leave last (5th) field empty for", "name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, 
padx=5)", "= tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5,", "total_buying_price, total_selling_price)) # Collect and save in order all user input from dividend", "this allows you to view and edit previous entries if old_share[8]: list_of_entries =", "\"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } # cost", "[\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", ] for txt in list_of_labels:", "column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window,", "div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display = 4 for", "column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share,", "in range(0, number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of", "font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window,", "(div_4, div_3, div_2, div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends =", "view and edit previous entries if old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1)", "# If previous entries exist, replace the ones displayed # (up to 4", "manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels", "div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, 
padx=5) div_date_2 =", "share):\", \"Exit dete:\"] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\")", "dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries", "column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0,", "number_of_entries_to_clear): if all_dividents: # if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie", "alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1,", "padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3])", "to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\":", "+ list_of_dates[counter].get()) # If previous entries exist, replace the ones displayed # (up", "from dividend # and dividend_date entries. 
# If no previous inputs: if old_share[8]", "if len(last_dividends) >= 4: dividends_to_display = 4 for counter in range(0, dividends_to_display): for", "value in enumerate( last_dividends[counter].split(\"-\")): # first run of last for loop gives you", "close_button = tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame,", "(div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends =", "edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0,", "pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0,", "{\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\":", "menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button", "higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price", "= low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] == \"\":", "a dividend inputed, insert up to four last # dividend entries and leave", "me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is", "== \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", 
to_delete[0]) shares_page.Shares.historical_canvas(parent)", "low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2])", "> 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() +", "text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2,", "no previous inputs: if old_share[8] == \"\": list_of_entries = (div_2, div_3, div_4, div_5)", "font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1,", "choice_window): \"\"\"display edit window for active or sold shares. After viewing or editing", "text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button =", "= \"Calabria 12\" def add_shares(parent): \"\"\"allows you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get())", "dividend # and dividend_date entries. 
# If no previous inputs: if old_share[8] ==", "import (shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows you", "(per share):\", \"Entry date:\", ] for txt in list_of_labels: label = tk.Label(top_window, text=txt,", "12\" def add_shares(parent): \"\"\"allows you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get())", "# first run of last for loop gives you dividend and # last", "div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in range(0, number_of_dividends):", "list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\")", "== 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2, div_3)", "else: list_of_entries = (div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends =", "if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0])", "width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5)", "\"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390)", "] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES:", "entries and leave last (5th) field empty for a new # entry, this", "= (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents = 
old_share[8].split(\"|\") for", "div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4", "if exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"]", "label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1])", "number_of_dividends = len(last_dividends) for counter in range(0, number_of_dividends): for div_or_date, value in enumerate(", "padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9,", "previous entries exist, replace the ones displayed # (up to 4 latest) with", "div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 =", "low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button( top_window, text=\"Add\",", "exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button", "= tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0,", "tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1,", "dictionary # without the need of calculating it all over again: if exit_price.get()", "else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof,", "choice_window): 
\"\"\"adds alarm to a share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy()", "0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2, div_3) list_of_notes", "share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\", command=lambda:", "quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window,", "cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno(", "= {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(),", "div_2, div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear =", "if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1,", "tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT)", "share opens a menu windows with buttons allowing you to edit/add alarm/delete share", "# dividend entries and leave last (5th) field empty for a new #", "div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents =", "padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share does", ">= 4: dividends_to_display = 4 for counter in range(0, dividends_to_display): for div_or_date, value", "if all_dividents: # if len(all_dividents) > 0: all_dividents.remove( 
all_dividents[len(all_dividents)-1]) for counter, entrie in", "LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", ] for", "sold, you need to consider fixed selling price. # Therefore sold cost can", "div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for", "width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7,", "len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get()", "\"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' }", "(share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries exist, replace the ones", "command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if", "to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0])", "width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5)", "= tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20)", "clicking on a closed or active share opens a menu windows with buttons", "column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share", 
"div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie", "date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\",", "and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\",", "top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window,", "font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5)", "share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS:", "name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window, width=9, font=FONT)", "= tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6,", "div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window, width=9, font=FONT)", "import messagebox from Files import (shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria 12\"", "= 4 all_dividents = old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if all_dividents: #", "name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": ''", "= tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9,", 
"add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking on a closed or active", "pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window))", "you to view and edit previous entries if old_share[8]: list_of_entries = (div_4, div_3,", "to dictionary # without the need of calculating it all over again: if", "value) else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2, div_3) list_of_notes = (div_date_1,", "menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button =", "value) div_5 = tk.Entry(top_window, width=9, font=FONT) div_5.grid(row=4, column=5, padx=5) div_date_5 = tk.Entry(top_window, width=9,", "column=3, pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\",", "windows with buttons allowing you to edit/add alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None,", "share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure you want to delete this?\"): if", "column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def", "\"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent)", "shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\",", "= tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0,", "= (div_4, 
div_3, div_2, div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends", "label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry", "was sold, you need to consider fixed selling price. # Therefore sold cost", "\"\"\" def save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(),", "Collect and save in order all user input from dividend # and dividend_date", "padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent,", "old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame =", "add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share):", "font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button(", "old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\",", "list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends)", "loop gives you dividend and # last gives this dividend's date if div_or_date", "list_of_entries = (div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\")", "menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, 
bg=\"green\", command=lambda:", "'' } # cost depends on changing current share price but if a", "font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0,", "def edit(parent, old_share, choice_window): \"\"\"display edit window for active or sold shares. After", "if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if entrie.get():", "= tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1,", "all_dividents = old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents)", "# if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if", "you sure you want to delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\",", "entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents)", "column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button( top_window, text=\"Add\", font=FONT,", "top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\",", "# and dividend_date entries. 
# If no previous inputs: if old_share[8] == \"\":", "edits were made) else: list_of_entries = (div_1, div_2, div_3, div_4, div_5) list_of_dates =", "\"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price (per share):\", \"Exit dete:\"] for txt", "\"Exit price (per share):\", \"Exit dete:\"] for txt in list_of_labels: label = tk.Label(top_window,", "div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT)", "else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"],", "column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window,", "shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS", "manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def", "in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If", "manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get()", "font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share does have a dividend inputed, insert", "columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) 
add_button.grid(row=0, column=0, padx=10, pady=5) close_button =", "entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } # cost depends", "cost can be calculated and added to dictionary # without the need of", "div_date_3, div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\") for __ in range(0,", "\"Entry price (per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price (per share):\",", "tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5)", "tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button", "if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get())", "command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking on a closed or", "pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share,", "column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window,", "(div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in range(0,", "tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking", "calculated and added to dictionary # without the need of calculating it 
all", "changes \"\"\" def save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\":", "counter, entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] =", "# last gives this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else:", "= tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4,", "and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent)", "editing you can save changes \"\"\" def save(): share = {\"Name\": name.get(), \"Quantity\":", "choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit window for active or sold shares.", "font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) #", "len(last_dividends) >= 4: dividends_to_display = 4 for counter in range(0, dividends_to_display): for div_or_date,", "manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)):", "def save(): high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high,", "width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label =", "height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) 
edit_button.grid(row=0,", "padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9,", "div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter,", "text=\"Delete\", font=FONT, bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button =", "entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries exist, replace the ones displayed #", "( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4:", "list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9,", "width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10)", "old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price(", "from Files import (shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria 12\" def add_shares(parent):", "text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking on a", "div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if", "tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30)", "height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label( top_window, text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) 
amount_label", "viewing or editing you can save changes \"\"\" def save(): share = {\"Name\":", "== \"\": list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3, div_date_4,", "price. # Therefore sold cost can be calculated and added to dictionary #", "column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 = tk.Entry(top_window,", "list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] =", "width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1,", "share[\"Dividends\"] = '|'.join(all_dividents) # check if all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and", "== \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"]", "= [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit", "\"\"\"adds alarm to a share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def", "\"Are you sure you want to delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table(", "date:\", \"Exit price (per share):\", \"Exit dete:\"] for txt in list_of_labels: label =", "counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get())", "Files import (shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows", "\"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) 
manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get()))", "allowing you to edit/add alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button", "(in case edits were made) else: list_of_entries = (div_1, div_2, div_3, div_4, div_5)", "tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT)", "entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1", "width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10,", "list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"]", "list_of_entries = (div_1, div_2, div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3, div_date_4,", "have a dividend inputed, insert up to four last # dividend entries and", "width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, padx=5)", "column=0, padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5)", "cost depends on changing current share price but if a # share was", "for counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" +", "current share price but if a # share was sold, you need to", "menu_window(parent, share): \"\"\"right clicking on a closed or active share opens a menu", "for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name", "font=FONT) name.grid(row=0, column=1, 
padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price", "tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save)", "edit window for active or sold shares. After viewing or editing you can", "= tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button( frame,", "old_share, choice_window): \"\"\"display edit window for active or sold shares. After viewing or", "low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] ==", "width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5)", "and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\":", "= tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9,", "previous entries if old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes = (", "save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\":", "label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9, font=FONT)", "entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window,", "font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, 
width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2", "\"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] ==", "range(0, number_of_dividends): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of last", "width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10)", "tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT)", "label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9, font=FONT)", "tk.Toplevel(parent, height=600, width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\",", "entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1, padx=5)", "exist, replace the ones displayed # (up to 4 latest) with the new", "div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display =", "depends on changing current share price but if a # share was sold,", "div_date_4.grid(row=5, column=4, padx=5) # if share does have a dividend inputed, insert up", "padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4])", "this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else:", "date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) 
manage_db.add_current_price( name.get(),", "in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) #", "manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) #", "top_window = tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price", "div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display = 4", "# (up to 4 latest) with the new ones (in case edits were", "LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", \"Divident:\", \"Divident", "dividends_to_display = 4 for counter in range(0, dividends_to_display): for div_or_date, value in enumerate(", "tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1,", "div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT)", "inputed, insert up to four last # dividend entries and leave last (5th)", "list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2, div_3) list_of_notes =", "text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\", font=FONT,", "high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5)", "font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = 
tk.Entry(top_window, width=9, font=FONT) entry_date.grid(row=3, column=1,", "+ \"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if all input correct", "you dividend and # last gives this dividend's date if div_or_date == 0:", "div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3 =", "exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] =", "is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10,", "\"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\",", "else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels", "div_date_3 = tk.Entry(top_window, width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT)", "on changing current share price but if a # share was sold, you", "quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date =", "div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display", "tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date = tk.Entry(top_window, width=9, font=FONT)", "add_shares(parent): \"\"\"allows you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and 
manage_db.check_date_format(date.get())):", "entries. # If no previous inputs: if old_share[8] == \"\": list_of_entries = (div_2,", "font=FONT) high_price_entry.grid(row=2, column=1, padx=5, pady=5) low_price_entry = tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5,", "\"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label = tk.Label(", "list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1)", "set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT, bg=\"red\",", "if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries", "dividend and # last gives this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0,", "padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking on a closed or active share", "bg=\"red\", command=lambda: delete(parent, share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window,", "without the need of calculating it all over again: if exit_price.get() != \"\":", "= old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents) >", "share price but if a # share was sold, you need to consider", "in range(0, dividends_to_display): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run of", "column=5, padx=5) div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window,", "share, menu_window)) delete_button.grid(row=0, column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", 
font=FONT, command=menu_window.destroy)", "edit/add alarm/delete share \"\"\" menu_window = tk.Toplevel(master=None, width=400, height=200) edit_button = tk.Button( menu_window,", "def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(),", "list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", ] for txt", "div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"]", "tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0,", "high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5)", "entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button =", "manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window =", "and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if old_share[6] == \"\":", "font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5)", "width=9, font=FONT) div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5)", "if all input correct if(manage_db.check_if_valid_name(name.get()) and 
manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == ''", "all user input from dividend # and dividend_date entries. # If no previous", "exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window,", "dete:\"] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES", "name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price", "= tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button", "font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is lower or equal", "edit previous entries if old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes =", "padx=10) date = tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window,", "\"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\",", "old_share[8].split(\"|\") last_dividends.reverse() if len(last_dividends) >= 4: dividends_to_display = 4 for counter in range(0,", "font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1,", "width=390) # LABELS: list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\",", "choice_window.destroy() def save(): high = high_price_entry.get() low = low_price_entry.get() if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): 
manage_db.add_alarm(prof,", "and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\":", "share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window, text=\"Alarm\", font=FONT, bg=\"green\",", "__ in range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1])", "calculate, scrap_web) FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows you to add share\"\"\"", "the new ones (in case edits were made) else: list_of_entries = (div_1, div_2,", "\"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent)", "\"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } # cost depends on", "else: list_of_entries = (div_1, div_2, div_3, div_4, div_5) list_of_dates = (div_date_1, div_date_2, div_date_3,", "pady=5) def menu_window(parent, share): \"\"\"right clicking on a closed or active share opens", "( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" +", "alarm to a share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save():", "previous inputs: if old_share[8] == \"\": list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates", "window for active or sold shares. 
After viewing or editing you can save", "list_of_notes[counter].insert(0, value) else: list_of_entries = (div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3)", "delete(parent, to_delete, choice_window): \"\"\"deletes unwanted share\"\"\" if messagebox.askyesno( \"Delete\", \"Are you sure you", "= 4 for counter in range(0, dividends_to_display): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")):", "list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get())", "frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button", "div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6])", "div_date_3.grid(row=5, column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 =", "if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5 = tk.Entry(top_window, width=9,", "it all over again: if exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price", "\"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share,", "manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = 
tk.Toplevel(parent, height=600,", "= (div_1, div_2, div_3) list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends", "choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry", "= tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per", "manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else: manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window =", "menu windows with buttons allowing you to edit/add alarm/delete share \"\"\" menu_window =", "font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5,", "ones displayed # (up to 4 latest) with the new ones (in case", "help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window):", "= \"\" if div_1.get(): share[\"Dividends\"] = ( div_1.get()+\"-\"+div_date_1.get()) for counter, entrie in enumerate(list_of_entries):", "to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window, width=10, font=FONT) high_price_entry.grid(row=2, column=1, padx=5,", "to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window):", "frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0,", "(div_2, 
div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\"", "width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\",", "share was sold, you need to consider fixed selling price. # Therefore sold", "+ entrie.get()+\"-\" + list_of_dates[counter].get()) # If previous entries exist, replace the ones displayed", "from tkinter import messagebox from Files import (shares_page, manage_db, calculate, scrap_web) FONT =", "were made) else: list_of_entries = (div_1, div_2, div_3, div_4, div_5) list_of_dates = (div_date_1,", "manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\",", "height=600, width=390) # LABELS list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry", "if a # share was sold, you need to consider fixed selling price.", "allows you to view and edit previous entries if old_share[8]: list_of_entries = (div_4,", "all_dividents: # if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries):", "again: if exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"])", "list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9,", "command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10, pady=5)", "up to four last # dividend entries and leave last (5th) field empty", "div_3, div_2, div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = 
old_share[8].split(\"|\")", "tk.Button(frame, text=\"Edit\", font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\",", "font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is higher or equal", "price (per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price (per share):\", \"Exit", "edit(parent, old_share, choice_window): \"\"\"display edit window for active or sold shares. After viewing", "'|'.join(all_dividents) # check if all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get()) or", "padx=5, pady=5) label = tk.Label( top_window, text=\"Price is lower or equal to:\", font=FONT)", "added to dictionary # without the need of calculating it all over again:", "width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2, padx=5)", "text=\"Alarm\", font=FONT, bg=\"green\", command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button(", "= tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2,", "div_date_4, div_date_5) number_of_entries_to_clear = 4 all_dividents = old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear):", "\"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\",", "width=9, font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4,", "buttons allowing you to edit/add alarm/delete share \"\"\" 
menu_window = tk.Toplevel(master=None, width=400, height=200)", "tk.Button( menu_window, text=\"Edit\", font=FONT, bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20)", "a # share was sold, you need to consider fixed selling price. #", "text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes unwanted", "old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=5) quantity.insert(0, old_share[2]) entry_price =", "enumerate( last_dividends[counter].split(\"-\")): # first run of last for loop gives you dividend and", "Therefore sold cost can be calculated and added to dictionary # without the", "font=FONT) entry_date.grid(row=3, column=1, padx=5) entry_date.insert(0, old_share[4]) div_1 = tk.Entry(top_window, width=9, font=FONT) div_1.grid(row=4, column=1,", "= tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share does have a", "user input from dividend # and dividend_date entries. 
# If no previous inputs:", "txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name =", "quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": entry_date.get(), \"SellingPrice\": exit_price.get(), \"SellingDate\": exit_date.get(), \"Cost\": '' } #", "old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if", "\"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit window for", "tk.Label(top_window, text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1,", "or active share opens a menu windows with buttons allowing you to edit/add", "list_of_labels = [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", \"Divident:\", \"Divident date:\",", "tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5, column=2,", "tk.Label( top_window, text=\"Price is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label", "After viewing or editing you can save changes \"\"\" def save(): share =", "int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and save in order", "# entry, this allows you to view and edit previous entries if old_share[8]:", "width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\",", "for loop gives you dividend and # last gives this dividend's date if", "div_4, div_5) list_of_dates = (div_date_2, 
div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get():", "price (per share):\", \"Entry date:\", ] for txt in list_of_labels: label = tk.Label(top_window,", "share):\", \"Entry date:\", \"Divident:\", \"Divident date:\", \"Exit price (per share):\", \"Exit dete:\"] for", "pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button = tk.Button( top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\",", "column=3, padx=5) div_4 = tk.Entry(top_window, width=9, font=FONT) div_4.grid(row=4, column=4, padx=5) div_date_4 = tk.Entry(top_window,", "entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date = tk.Entry(top_window, width=9, font=FONT)", "== ''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table(", "# without the need of calculating it all over again: if exit_price.get() !=", "old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in range(0, number_of_dividends): for div_or_date, value in", "prof = share[1] alarm = manage_db.fetch_alarm(prof) choice_window.destroy() def save(): high = high_price_entry.get() low", "but if a # share was sold, you need to consider fixed selling", "= tk.Entry(top_window, width=9, font=FONT) date.grid(row=3, column=1, padx=10) add_button = tk.Button( top_window, text=\"Add\", font=FONT,", "padx=5) exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame,", "high, low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window = tk.Toplevel(parent,", "or sold shares. 
After viewing or editing you can save changes \"\"\" def", "old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents) > 0:", "label = tk.Label( top_window, text=\"Price is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5,", "manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(),", "width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0, old_share[1]) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1,", "tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2,", "\"\", \"SellingDate\": \"\", \"Dividends\": \"\"} manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( name.get(), scrap_web.pull_current_price(name.get())) shares_page.Shares.curent_canvas(parent) top_window.destroy() top_window", "text=prof, font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let me know when:\",", "\"Divident date:\", \"Exit price (per share):\", \"Exit dete:\"] for txt in list_of_labels: label", "equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is lower", "top_window, text=\"Add\", font=FONT, command=save) add_button.grid(sticky=\"nw\", padx=5, pady=5) def menu_window(parent, share): \"\"\"right clicking on", "share, choice_window): \"\"\"adds alarm to a share\"\"\" prof = share[1] alarm = manage_db.fetch_alarm(prof)", "font=FONT) div_date_2.grid(row=5, column=2, padx=5) div_3 = tk.Entry(top_window, width=9, font=FONT) div_3.grid(row=4, column=3, padx=5) div_date_3", "exit_date.insert(0, old_share[7]) frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", 
columnspan=5) add_button = tk.Button(frame, text=\"Edit\",", "or editing you can save changes \"\"\" def save(): share = {\"Name\": name.get(),", "over again: if exit_price.get() != \"\": total_buying_price = int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int(", "amount_label = tk.Label( top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label", "tk.Entry(top_window, width=10, font=FONT) low_price_entry.grid(row=3, column=1, padx=5, pady=5) high_price_entry.insert(0, alarm[1]) low_price_entry.insert(0, alarm[2]) add_button =", "quantity.get(), \"BuyingPrice\": entry_price.get(), \"BuyingDate\": date.get(), \"Cost\": \"\", \"SellingPrice\": \"\", \"SellingDate\": \"\", \"Dividends\": \"\"}", "a closed or active share opens a menu windows with buttons allowing you", "bg=\"green\", command=lambda: edit(parent, share, menu_window)) edit_button.grid(row=0, column=0, pady=20, padx=20) alarm_button = tk.Button( menu_window,", "you can save changes \"\"\" def save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(),", "if messagebox.askyesno( \"Delete\", \"Are you sure you want to delete this?\"): if to_delete[6]", "and added to dictionary # without the need of calculating it all over", "\"Calabria 12\" def add_shares(parent): \"\"\"allows you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and", "\"-\" + list_of_dates[counter].get()) share[\"Dividends\"] = '|'.join(all_dividents) # check if all input correct if(manage_db.check_if_valid_name(name.get())", "\"Divident:\", \"Divident date:\", \"Exit price (per share):\", \"Exit dete:\"] for txt in list_of_labels:", "insert up to four last # dividend entries and leave last (5th) field", "padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date =", "text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\", 
padx=5, pady=5) label = tk.Label( top_window, text=\"Price", "def add_shares(parent): \"\"\"allows you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and", "text=\"Price is higher or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label(", "scrap_web) FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows you to add share\"\"\" def", "manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"]))", "manage_db.check_for_real_numbers(exit_price.get()) or exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''):", "when:\", font=FONT) amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is higher or", "quantity.insert(0, old_share[2]) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=5) entry_price.insert(0, old_share[3]) entry_date", "font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5) name.insert(0,", "exit_price.get() == '' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if", "\"\": list_of_entries = (div_2, div_3, div_4, div_5) list_of_dates = (div_date_2, div_date_3, div_date_4, div_date_5)", "top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\",", "share[6] == \"\": shares_page.Shares.curent_canvas(parent) else: shares_page.Shares.historical_canvas(parent) top_window 
= tk.Toplevel(parent, height=350, width=390) top_window.title(\"Edit\") amount_label", "amount_label.grid(sticky=\"nw\", padx=5, pady=5) label = tk.Label( top_window, text=\"Price is higher or equal to:\",", "share):\", \"Entry date:\", ] for txt in list_of_labels: label = tk.Label(top_window, text=txt, font=FONT)", "column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1, padx=5) exit_price.insert(0, old_share[6]) exit_date", "\"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy() def edit(parent, old_share, choice_window): \"\"\"display edit window for active", "\"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share) manage_db.add_current_price( share[\"Name\"], scrap_web.pull_current_price(share[\"Name\"])) shares_page.Shares.curent_canvas(parent) else:", "column=1, padx=5) div_2 = tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window,", "padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm to a share\"\"\" prof =", "'' and manage_db.check_date_format(entry_date.get()) and manage_db.check_date_format(exit_date.get()) or exit_date.get() == ''): top_window.destroy() if old_share[6] ==", "entry, this allows you to view and edit previous entries if old_share[8]: list_of_entries", "input from dividend # and dividend_date entries. # If no previous inputs: if", "pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, )", "save in order all user input from dividend # and dividend_date entries. 
#", "if(manage_db.check_for_real_numbers(high) and manage_db.check_for_real_numbers(low)): manage_db.add_alarm(prof, high, low) top_window.destroy() if share[6] == \"\": shares_page.Shares.curent_canvas(parent) else:", "active share opens a menu windows with buttons allowing you to edit/add alarm/delete", "opens a menu windows with buttons allowing you to edit/add alarm/delete share \"\"\"", "padx=5) div_date_5 = tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9,", "replace the ones displayed # (up to 4 latest) with the new ones", "div_date_4 = tk.Entry(top_window, width=9, font=FONT) div_date_4.grid(row=5, column=4, padx=5) # if share does have", "FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows you to add share\"\"\" def save():", "menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20, ) def delete(parent, to_delete, choice_window): \"\"\"deletes", "total_selling_price)) # Collect and save in order all user input from dividend #", "command=lambda: set_alarm(parent, share, menu_window)) alarm_button.grid(row=0, column=1, pady=20) delete_button = tk.Button( menu_window, text=\"Delete\", font=FONT,", "exit_date.get() == ''): top_window.destroy() if old_share[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else:", "list_of_notes = (div_date_1, div_date_2, div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter", "\"\"\"allows you to add share\"\"\" def save(): if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_date_format(date.get())): share", "messagebox.askyesno( \"Delete\", \"Are you sure you want to delete this?\"): if to_delete[6] ==", "can save changes \"\"\" def save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\":", "= int( 
share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect and save in", "div_2, div_1) list_of_notes = ( div_date_4, div_date_3, div_date_2, div_date_1) last_dividends = old_share[8].split(\"|\") last_dividends.reverse()", "range(0, number_of_entries_to_clear): if all_dividents: # if len(all_dividents) > 0: all_dividents.remove( all_dividents[len(all_dividents)-1]) for counter,", "font=FONT) label.grid(sticky=\"nw\") # ENTRIES: name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=10) quantity", "If no previous inputs: if old_share[8] == \"\": list_of_entries = (div_2, div_3, div_4,", "\"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", to_delete[0]) shares_page.Shares.historical_canvas(parent) choice_window.destroy()", "displayed # (up to 4 latest) with the new ones (in case edits", "column=2, pady=20, padx=20) cancel_button = tk.Button( menu_window, text=\"Cancel\", font=FONT, command=menu_window.destroy) cancel_button.grid(row=0, column=3, pady=20,", "font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button = tk.Button(frame, text=\"?\", font=FONT) help_button.grid(row=0, column=2, padx=10,", "column=2, padx=10, pady=5) def set_alarm(parent, share, choice_window): \"\"\"adds alarm to a share\"\"\" prof", "if share does have a dividend inputed, insert up to four last #", "counter in range(0, dividends_to_display): for div_or_date, value in enumerate( last_dividends[counter].split(\"-\")): # first run", "pady=5) close_button = tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy) close_button.grid(row=0, column=1, pady=5) help_button =", "manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", old_share[0]) else: 
manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\":", "text=txt, font=FONT) label.grid(sticky=\"nw\") # ENTRIES name = tk.Entry(top_window, width=9, font=FONT) name.grid(row=0, column=1, padx=5)", "padx=10) quantity = tk.Entry(top_window, width=9, font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9,", "def set_alarm(parent, share, choice_window): \"\"\"adds alarm to a share\"\"\" prof = share[1] alarm", "to 4 latest) with the new ones (in case edits were made) else:", "= '|'.join(all_dividents) # check if all input correct if(manage_db.check_if_valid_name(name.get()) and manage_db.check_for_real_numbers(entry_price.get()) and manage_db.check_for_real_numbers(exit_price.get())", "= tk.Entry(top_window, width=9, font=FONT) div_2.grid(row=4, column=2, padx=5) div_date_2 = tk.Entry(top_window, width=9, font=FONT) div_date_2.grid(row=5,", "last (5th) field empty for a new # entry, this allows you to", "in enumerate( last_dividends[counter].split(\"-\")): # first run of last for loop gives you dividend", "tk from tkinter import messagebox from Files import (shares_page, manage_db, calculate, scrap_web) FONT", "text=\"Price is lower or equal to:\", font=FONT) label.grid(sticky=\"nw\", padx=5, pady=5) high_price_entry = tk.Entry(top_window,", "frame = tk.Frame(top_window, width=200, height=30) frame.grid(sticky=\"nw\", columnspan=5) add_button = tk.Button(frame, text=\"Edit\", font=FONT, command=save)", "int( share[\"Quantity\"])*float(share[\"BuyingPrice\"]) total_selling_price = int( share[\"Quantity\"])*float(share[\"SellingPrice\"]) share[\"Cost\"] = str(calculate.total_costs( total_buying_price, total_selling_price)) # Collect", "share): \"\"\"right clicking on a closed or active share opens a menu windows", "4 all_dividents = old_share[8].split(\"|\") for __ in range(0, number_of_entries_to_clear): if all_dividents: # if", "exit_price.grid(row=6, column=1, 
padx=5) exit_price.insert(0, old_share[6]) exit_date = tk.Entry(top_window, width=9, font=FONT) exit_date.grid(row=7, column=1, padx=5)", "font=FONT, command=save) add_button.grid(row=0, column=0, padx=10, pady=5) close_button = tk.Button( frame, text=\"Close\", font=FONT, command=top_window.destroy)", "all_dividents[len(all_dividents)-1]) for counter, entrie in enumerate(list_of_entries): if entrie.get(): all_dividents.append(entrie.get() + \"-\" + list_of_dates[counter].get())", "amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let me know when:\", font=FONT) amount_label.grid(sticky=\"nw\",", "save changes \"\"\" def save(): share = {\"Name\": name.get(), \"Quantity\": quantity.get(), \"BuyingPrice\": entry_price.get(),", "# Collect and save in order all user input from dividend # and", "= (div_date_2, div_date_3, div_date_4, div_date_5) share[\"Dividends\"] = \"\" if div_1.get(): share[\"Dividends\"] = (", "\"timestamp\", old_share[0]) else: manage_db.delete_row_from_table( \"gpw_shares_closed\", \"timestamp\", old_share[0]) if share[\"SellingDate\"] == \"\": manage_db.add_share(\"gpw_shares\", share)", "manage_db.add_share(\"gpw_shares_closed\", share) shares_page.Shares.historical_canvas(parent) choice_window.destroy() top_window = tk.Toplevel(parent, height=600, width=390) # LABELS list_of_labels =", "(shares_page, manage_db, calculate, scrap_web) FONT = \"Calabria 12\" def add_shares(parent): \"\"\"allows you to", "div_date_3) last_dividends = old_share[8].split(\"|\") number_of_dividends = len(last_dividends) for counter in range(0, number_of_dividends): for", "font=FONT) quantity.grid(row=1, column=1, padx=10) entry_price = tk.Entry(top_window, width=9, font=FONT) entry_price.grid(row=2, column=1, padx=10) date", "font=FONT) div_1.grid(row=4, column=1, padx=5) div_date_1 = tk.Entry(top_window, width=9, font=FONT) div_date_1.grid(row=5, column=1, padx=5) div_2", "tk.Label( top_window, text=prof, font=FONT) 
amount_label.grid(sticky=\"nw\", padx=5, pady=5) amount_label = tk.Label( top_window, text=\"Let me", "\"Cost\": '' } # cost depends on changing current share price but if", "entries if old_share[8]: list_of_entries = (div_4, div_3, div_2, div_1) list_of_notes = ( div_date_4,", "this dividend's date if div_or_date == 0: list_of_entries[counter].insert(0, value) else: list_of_notes[counter].insert(0, value) div_5", "entrie in enumerate(list_of_entries): if entrie.get(): share[\"Dividends\"] = (share[\"Dividends\"]+\"|\" + entrie.get()+\"-\" + list_of_dates[counter].get()) #", "tk.Entry(top_window, width=9, font=FONT) div_date_5.grid(row=5, column=5, padx=5) exit_price = tk.Entry(top_window, width=9, font=FONT) exit_price.grid(row=6, column=1,", "delete this?\"): if to_delete[6] == \"\": manage_db.delete_row_from_table( \"gpw_shares\", \"timestamp\", to_delete[0]) shares_page.Shares.curent_canvas(parent) else: manage_db.delete_row_from_table(", "fixed selling price. # Therefore sold cost can be calculated and added to", "shares. After viewing or editing you can save changes \"\"\" def save(): share", "empty for a new # entry, this allows you to view and edit", "= [\"Name:\", \"Quantity:\", \"Entry price (per share):\", \"Entry date:\", ] for txt in" ]
[ "negatives def is_valid_IP(str): nums = str.split('.') return (len(nums) == 4) and all(is_valid_octet(n) for", "nums = str.split('.') return (len(nums) == 4) and all(is_valid_octet(n) for n in nums)", "<reponame>SelvorWhim/competitive<gh_stars>0 def is_valid_octet(n_str): return n_str.isdigit() and (n_str[0] != '0' or n_str == '0')", "<= 255 # isdigit also returns false for empty strings and negatives def", "255 # isdigit also returns false for empty strings and negatives def is_valid_IP(str):", "def is_valid_octet(n_str): return n_str.isdigit() and (n_str[0] != '0' or n_str == '0') and", "returns false for empty strings and negatives def is_valid_IP(str): nums = str.split('.') return", "also returns false for empty strings and negatives def is_valid_IP(str): nums = str.split('.')", "(n_str[0] != '0' or n_str == '0') and int(n_str) <= 255 # isdigit", "is_valid_IP(str): nums = str.split('.') return (len(nums) == 4) and all(is_valid_octet(n) for n in", "strings and negatives def is_valid_IP(str): nums = str.split('.') return (len(nums) == 4) and", "n_str.isdigit() and (n_str[0] != '0' or n_str == '0') and int(n_str) <= 255", "def is_valid_IP(str): nums = str.split('.') return (len(nums) == 4) and all(is_valid_octet(n) for n", "return n_str.isdigit() and (n_str[0] != '0' or n_str == '0') and int(n_str) <=", "and int(n_str) <= 255 # isdigit also returns false for empty strings and", "empty strings and negatives def is_valid_IP(str): nums = str.split('.') return (len(nums) == 4)", "and (n_str[0] != '0' or n_str == '0') and int(n_str) <= 255 #", "'0' or n_str == '0') and int(n_str) <= 255 # isdigit also returns", "!= '0' or n_str == '0') and int(n_str) <= 255 # isdigit also", "== '0') and int(n_str) <= 255 # isdigit also returns false for empty", "n_str == '0') and int(n_str) <= 255 # isdigit also returns false for", "false for empty strings and negatives def is_valid_IP(str): nums = str.split('.') return (len(nums)", "isdigit also returns false for empty strings and 
negatives def is_valid_IP(str): nums =", "# isdigit also returns false for empty strings and negatives def is_valid_IP(str): nums", "int(n_str) <= 255 # isdigit also returns false for empty strings and negatives", "for empty strings and negatives def is_valid_IP(str): nums = str.split('.') return (len(nums) ==", "and negatives def is_valid_IP(str): nums = str.split('.') return (len(nums) == 4) and all(is_valid_octet(n)", "'0') and int(n_str) <= 255 # isdigit also returns false for empty strings", "or n_str == '0') and int(n_str) <= 255 # isdigit also returns false", "is_valid_octet(n_str): return n_str.isdigit() and (n_str[0] != '0' or n_str == '0') and int(n_str)" ]
[ "int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old, new, occurrence=-1):", "key, value in data.items(): if isinstance(value, dict): value = pythonify(value) elif isinstance(value, list):", "k=count)) def rreplace(s, old, new, occurrence=-1): li = s.rsplit(old, occurrence) return new.join(li) def", "val in value] elif isinstance(value, str): if value.lower() == \"true\": value = True", "data.items(): if isinstance(value, dict): value = pythonify(value) elif isinstance(value, list): value = [pythonify(val)", "random import string from llvmlite.ir import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase +", "isinstance(value, list): value = [pythonify(val) for val in value] elif isinstance(value, str): if", "= [pythonify(val) for val in value] elif isinstance(value, str): if value.lower() == \"true\":", "if name in self.identified_types: return self.identified_types[name] return None def monkey_patch(): Context.get_identified_type_if_exists = _get_identified_type_if_exists", "import random import string from llvmlite.ir import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase", "hashlib import random import string from llvmlite.ir import Context def generate_random_name(count: int): return", "= s.rsplit(old, occurrence) return new.join(li) def pythonify(data: dict): for key, value in data.items():", "s.rsplit(old, occurrence) return new.join(li) def pythonify(data: dict): for key, value in data.items(): if", "hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if name in self.identified_types: return self.identified_types[name] return", "if value.lower() == \"true\": value = True elif value.lower() == \"false\": value =", "return new.join(li) def pythonify(data: dict): for key, value in data.items(): if isinstance(value, dict):", "def 
generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old,", "== \"false\": value = False data[key] = value return data def good_hash(w: str):", "new.join(li) def pythonify(data: dict): for key, value in data.items(): if isinstance(value, dict): value", "''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old, new, occurrence=-1): li =", "data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if name", "import hashlib import random import string from llvmlite.ir import Context def generate_random_name(count: int):", "in data.items(): if isinstance(value, dict): value = pythonify(value) elif isinstance(value, list): value =", "return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if name in self.identified_types: return self.identified_types[name]", "data[key] = value return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context,", "llvmlite.ir import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count))", "elif isinstance(value, str): if value.lower() == \"true\": value = True elif value.lower() ==", "def pythonify(data: dict): for key, value in data.items(): if isinstance(value, dict): value =", "value in data.items(): if isinstance(value, dict): value = pythonify(value) elif isinstance(value, list): value", "value = False data[key] = value return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest()", "for val in value] elif isinstance(value, str): if value.lower() == \"true\": value =", "dict): value = pythonify(value) elif isinstance(value, list): value = [pythonify(val) for val in", "\"true\": 
value = True elif value.lower() == \"false\": value = False data[key] =", "for key, value in data.items(): if isinstance(value, dict): value = pythonify(value) elif isinstance(value,", "def _get_identified_type_if_exists(self: Context, name: str): if name in self.identified_types: return self.identified_types[name] return None", "in value] elif isinstance(value, str): if value.lower() == \"true\": value = True elif", "isinstance(value, dict): value = pythonify(value) elif isinstance(value, list): value = [pythonify(val) for val", "value.lower() == \"false\": value = False data[key] = value return data def good_hash(w:", "[pythonify(val) for val in value] elif isinstance(value, str): if value.lower() == \"true\": value", "elif value.lower() == \"false\": value = False data[key] = value return data def", "\"false\": value = False data[key] = value return data def good_hash(w: str): return", "value = pythonify(value) elif isinstance(value, list): value = [pythonify(val) for val in value]", "string from llvmlite.ir import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase +", "good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if name in self.identified_types:", "def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if name in", "string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old, new, occurrence=-1): li = s.rsplit(old, occurrence)", "occurrence) return new.join(li) def pythonify(data: dict): for key, value in data.items(): if isinstance(value,", "dict): for key, value in data.items(): if isinstance(value, dict): value = pythonify(value) elif", "list): value = [pythonify(val) for val in value] elif isinstance(value, str): if value.lower()", "li = s.rsplit(old, occurrence) return new.join(li) def pythonify(data: dict): for key, value 
in", "pythonify(value) elif isinstance(value, list): value = [pythonify(val) for val in value] elif isinstance(value,", "+ string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old, new, occurrence=-1): li = s.rsplit(old,", "value] elif isinstance(value, str): if value.lower() == \"true\": value = True elif value.lower()", "_get_identified_type_if_exists(self: Context, name: str): if name in self.identified_types: return self.identified_types[name] return None def", "return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if", "+ string.digits, k=count)) def rreplace(s, old, new, occurrence=-1): li = s.rsplit(old, occurrence) return", "= pythonify(value) elif isinstance(value, list): value = [pythonify(val) for val in value] elif", "import string from llvmlite.ir import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase", "== \"true\": value = True elif value.lower() == \"false\": value = False data[key]", "new, occurrence=-1): li = s.rsplit(old, occurrence) return new.join(li) def pythonify(data: dict): for key,", "value = True elif value.lower() == \"false\": value = False data[key] = value", "string.digits, k=count)) def rreplace(s, old, new, occurrence=-1): li = s.rsplit(old, occurrence) return new.join(li)", "= True elif value.lower() == \"false\": value = False data[key] = value return", "Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def rreplace(s,", "isinstance(value, str): if value.lower() == \"true\": value = True elif value.lower() == \"false\":", "str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str): if name in self.identified_types: return", "rreplace(s, old, new, occurrence=-1): li = s.rsplit(old, occurrence) return 
new.join(li) def pythonify(data: dict):", "from llvmlite.ir import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits,", "name: str): if name in self.identified_types: return self.identified_types[name] return None def monkey_patch(): Context.get_identified_type_if_exists", "str): if name in self.identified_types: return self.identified_types[name] return None def monkey_patch(): Context.get_identified_type_if_exists =", "def rreplace(s, old, new, occurrence=-1): li = s.rsplit(old, occurrence) return new.join(li) def pythonify(data:", "= value return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name:", "value = [pythonify(val) for val in value] elif isinstance(value, str): if value.lower() ==", "generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old, new,", "occurrence=-1): li = s.rsplit(old, occurrence) return new.join(li) def pythonify(data: dict): for key, value", "str): if value.lower() == \"true\": value = True elif value.lower() == \"false\": value", "pythonify(data: dict): for key, value in data.items(): if isinstance(value, dict): value = pythonify(value)", "return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def rreplace(s, old, new, occurrence=-1): li", "value.lower() == \"true\": value = True elif value.lower() == \"false\": value = False", "False data[key] = value return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self:", "if isinstance(value, dict): value = pythonify(value) elif isinstance(value, list): value = [pythonify(val) for", "= False data[key] = value return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def", "elif isinstance(value, list): 
value = [pythonify(val) for val in value] elif isinstance(value, str):", "True elif value.lower() == \"false\": value = False data[key] = value return data", "old, new, occurrence=-1): li = s.rsplit(old, occurrence) return new.join(li) def pythonify(data: dict): for", "value return data def good_hash(w: str): return hashlib.md5(w.encode()).hexdigest() def _get_identified_type_if_exists(self: Context, name: str):", "import Context def generate_random_name(count: int): return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count)) def", "Context, name: str): if name in self.identified_types: return self.identified_types[name] return None def monkey_patch():" ]
[ "= [] for xtype in edge_types: df = getattr(inp, xtype, None) if df", "create_using=None): \"\"\"Create new nx.Graph-like object from a SWMM5.1 inp file Parameters ---------- inp", "target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp):", "kwarg of networkx drawing methods. \"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True)", "['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the", "df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1])", "df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from", "of graph to make. If None is specified, then this function defaults to", "a dict of node names with x, y coordinates as values. Parameters ----------", "str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return", "the node coordinates and subcatchment positions. 
Polygons are converted to coordinate pairs through", ".groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k): list(map(float, v)) for k, v in", "edge_list = [] for index, row in edges.iterrows(): _to, _from = index data", "set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates and", "\"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos", "G.nodes(data=True): df = {} n, data = node if index_col is not None:", "= str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def", "['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] +", "= ( coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return", "from a SWMM5.1 inp file Parameters ---------- inp : file_path or hymo.SWMMInpFile create_using", "be read to pull the node coordinates and subcatchment positions. 
Polygons are converted", "df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls = [] for edge in G.edges(data=True):", "pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES", "df.index) .assign(id=lambda df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node',", "source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def", "s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda", "'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage',", "df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col is not None:", "_from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp", "else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G,", "inp : string or hymo.SwmmInputFile this file will be read to pull the", "pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def", "df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') 
G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G,", "def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates and subcatchment from inp file", "edges and nodes from a SWMM 5.1 input file. Parameters ---------- G :", "--------- This function is meant to be similar to the nx.from_pandas_edgelist() \"\"\" inp", "!= 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) )", ") node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes from", "[] for xtype in edge_types: df = getattr(inp, xtype, None) if df is", "df.index.map(str) df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp)", "None: df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True)", "pairs through their centroid. Returns ------- dict suitable for use as the `pos`", "pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = [] for", "xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges =", "'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] != 's'", "i.e., a dict of node names with x, y coordinates as values. 
Parameters", "edges_to_df(G): ls = [] for edge in G.edges(data=True): df = {} _from, _to,", "'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers',", "'s' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def", "= [] for xtype in node_types: df = getattr(inp, xtype, None) if df", "values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from a SWMM5.1 inp file", "['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:,", "cols=None): edges = df.set_index([source, target]) if cols is not None: if isinstance(cols, str):", "to pull the node coordinates and subcatchment positions. Polygons are converted to coordinate", "] def nodes_to_df(G, index_col=None): ls = [] for node in G.nodes(data=True): df =", "Graph Reference --------- This function is meant to be similar to the nx.from_pandas_edgelist()", "'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ]", "not None: df[index_col] = str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type'] =", "hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node')", "SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments',", "{} _from, _to, data = edge df['from'] = str(_from) df['to'] = str(_to) df['type']", "\"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = [] for xtype", "df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column,", 
"df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']]", "None is specified, then this function defaults to an nx.MultiDiGraph() instance Returns -------", "the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using is None: create_using = nx.MultiDiGraph()", "target='target', cols=None): edges = df.set_index([source, target]) if cols is not None: if isinstance(cols,", "Parameters ---------- inp : string or hymo.SwmmInputFile this file will be read to", "df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda", "return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) )", "df is not None: df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column,", "= df.index.map(str) df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp =", "return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs", "pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates", "create_using is None: create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node',", "_to, data = edge df['from'] = str(_from) df['to'] = str(_to) df['type'] = 'link'", "node_types = SWMM_NODE_TYPES node_dfs = [] for xtype in node_types: df = getattr(inp,", "= edge df['from'] = str(_from) df['to'] = str(_to) df['type'] = 'link' df.update(data) ls.append(df)", "drawing `pos` format, i.e., a dict of node names with x, 
y coordinates", "be similar to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using is None:", "is not None: df = df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index() return", "------- Graph Reference --------- This function is meant to be similar to the", "SWMM5.1 inp file Parameters ---------- inp : file_path or hymo.SWMMInpFile create_using : nx.Graph-like", "names with x, y coordinates as values. Parameters ---------- inp : string or", "from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G", ".pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s:", "'s' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df)", "_to, _from = index data = row.to_dict() edge_list.append([_to, _from, data]) return edge_list def", "df = getattr(inp, xtype, None) if df is not None: df = (", ".rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs = [] for xtype in", "nodes from a SWMM 5.1 input file. Parameters ---------- G : nx.Graph-like object", "from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from a SWMM5.1 inp file Parameters ----------", "file to networkx drawing `pos` format, i.e., a dict of node names with", "return pandas.DataFrame(ls) def edges_to_df(G): ls = [] for edge in G.edges(data=True): df =", "from a SWMM 5.1 input file. 
Parameters ---------- G : nx.Graph-like object inp", "'storage', ] def nodes_to_df(G, index_col=None): ls = [] for node in G.nodes(data=True): df", "= _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = (", "[] for node in G.nodes(data=True): df = {} n, data = node if", "edges_to_df(G)]) .reset_index(drop=True) ) if index_col is not None: df = df.set_index(index_col) df.index =", "network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col is", "function is meant to be similar to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp)", "pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes from a SWMM 5.1", "( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^'", "\"\"\" inp = _validate_hymo_inp(inp) if create_using is None: create_using = nx.MultiDiGraph() df_edge_list =", "not None: if isinstance(cols, str): cols = [cols] edges = edges.loc[:, cols] edge_list", "= _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = [] for xtype in node_types: df", "df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:,", "def network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col", "import networkx as nx import hymo from .util import _upper_case_column, _validate_hymo_inp from .compat", ".mean()) .T .to_dict('list') ) return {str(k): list(map(float, v)) for k, v in pos.items()}", "\"\"\"Reads and converts swmm node coordinates and subcatchment from inp file to networkx", "and subcatchment from inp file to 
networkx drawing `pos` format, i.e., a dict", "networkx drawing methods. \"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys =", "'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls = [] for", "of networkx drawing methods. \"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys", "None: df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else", "def edges_to_df(G): ls = [] for edge in G.edges(data=True): df = {} _from,", "subcatchment from inp file to networkx drawing `pos` format, i.e., a dict of", ".compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ]", "def nodes_to_df(G, index_col=None): ls = [] for node in G.nodes(data=True): df = {}", "cols] edge_list = [] for index, row in edges.iterrows(): _to, _from = index", "if isinstance(cols, str): cols = [cols] edges = edges.loc[:, cols] edge_list = []", "def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from a SWMM5.1 inp file Parameters", "xtype, None) if df is not None: df = ( df .rename(columns={'From_Node': 'Inlet_Node',", "index_col is not None: df[index_col] = str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n)))", ".loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add", "if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df)", "polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float) .append( polys .astype(float) .groupby(polys.index)", "for xtype in edge_types: df = getattr(inp, xtype, None) if df is not", "( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 
'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']]", "object from a SWMM5.1 inp file Parameters ---------- inp : file_path or hymo.SWMMInpFile", "pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G,", "= pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None):", "nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using is None: create_using = nx.MultiDiGraph() df_edge_list", "df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls)", "inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^' +", ".rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def", "inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float) .append( polys", "to networkx drawing `pos` format, i.e., a dict of node names with x,", "= from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return", ".util import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes 
SWMM_LINK_TYPES = [ 'weirs',", "node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes from a", "[] for xtype in node_types: df = getattr(inp, xtype, None) if df is", "nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs", ".pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^' + s))", "def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types =", "xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return", ") df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts", "coordinates and subcatchment positions. Polygons are converted to coordinate pairs through their centroid.", "x, y coordinates as values. Parameters ---------- inp : string or hymo.SwmmInputFile this", "to coordinate pairs through their centroid. Returns ------- dict suitable for use as", "hymo.SwmmInputFile this file will be read to pull the node coordinates and subcatchment", "`pos` kwarg of networkx drawing methods. 
\"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column,", "= _validate_hymo_inp(inp) if create_using is None: create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G", "set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [", "df.set_index([source, target]) if cols is not None: if isinstance(cols, str): cols = [cols]", ": string or hymo.SwmmInputFile this file will be read to pull the node", "nodes_to_df(G, index_col=None): ls = [] for node in G.nodes(data=True): df = {} n,", "'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if", "s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source',", "coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float)", "df.index = df.index.map(str) df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp", "create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using,", "= index data = row.to_dict() edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return", "with x, y coordinates as values. Parameters ---------- inp : string or hymo.SwmmInputFile", "centroid. 
Returns ------- dict suitable for use as the `pos` kwarg of networkx", "index_col=None): ls = [] for node in G.nodes(data=True): df = {} n, data", "if cols is not None: if isinstance(cols, str): cols = [cols] edges =", "df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links", "node in G.nodes(data=True): df = {} n, data = node if index_col is", "pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col is not None: df = df.set_index(index_col)", "\"\"\"Add the edges and nodes from a SWMM 5.1 input file. Parameters ----------", "node coordinates and subcatchment positions. Polygons are converted to coordinate pairs through their", "G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs)", "polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k): list(map(float, v)) for k,", "cols is not None: if isinstance(cols, str): cols = [cols] edges = edges.loc[:,", "n, data = node if index_col is not None: df[index_col] = str(n) df['from']", "= ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node',", "import pandas import networkx as nx import hymo from .util import _upper_case_column, _validate_hymo_inp", "a SWMM 5.1 input file. 
Parameters ---------- G : nx.Graph-like object inp :", "= node if index_col is not None: df[index_col] = str(n) df['from'] = str(n)", "= [] for index, row in edges.iterrows(): _to, _from = index data =", "edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source, target]) if cols is", "not None: df = df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index() return df", "pos = ( coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') )", "inp : file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list", "data = edge df['from'] = str(_from) df['to'] = str(_to) df['type'] = 'link' df.update(data)", "s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes", "read to pull the node coordinates and subcatchment positions. Polygons are converted to", "through their centroid. Returns ------- dict suitable for use as the `pos` kwarg", "pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create", "is specified, then this function defaults to an nx.MultiDiGraph() instance Returns ------- Graph", "from .util import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [", "and converts swmm node coordinates and subcatchment from inp file to networkx drawing", ": file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list =", "coordinates as values. 
Parameters ---------- inp : string or hymo.SwmmInputFile this file will", "df = df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp):", "an nx.MultiDiGraph() instance Returns ------- Graph Reference --------- This function is meant to", "row in edges.iterrows(): _to, _from = index data = row.to_dict() edge_list.append([_to, _from, data])", "_validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = [] for xtype in node_types: df =", ".rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df:", "df.index) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']]", "coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k): list(map(float,", ") if index_col is not None: df = df.set_index(index_col) df.index = df.index.map(str) df", "swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates and subcatchment from inp file to", "index, row in edges.iterrows(): _to, _from = index data = row.to_dict() edge_list.append([_to, _from,", "pandas.DataFrame(ls) def edges_to_df(G): ls = [] for edge in G.edges(data=True): df = {}", "df = {} n, data = node if index_col is not None: df[index_col]", "y coordinates as values. Parameters ---------- inp : string or hymo.SwmmInputFile this file", "are converted to coordinate pairs through their centroid. 
Returns ------- dict suitable for", "xtype, None) if df is not None: df = ( df .pipe(_upper_case_column, include_index=True)", "target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like", "'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return", "inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T", "of node names with x, y coordinates as values. Parameters ---------- inp :", "= [] for edge in G.edges(data=True): df = {} _from, _to, data =", "= SWMM_NODE_TYPES node_dfs = [] for xtype in node_types: df = getattr(inp, xtype,", "inp file to networkx drawing `pos` format, i.e., a dict of node names", "= inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float) .append(", "= getattr(inp, xtype, None) if df is not None: df = ( df", "s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df,", ": nx.Graph-like object, optional (default=None) the type of graph to make. If None", "'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs = [] for", "'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node',", "the edges and nodes from a SWMM 5.1 input file. 
Parameters ---------- G", "= [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions',", "= pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from a", "graph to make. If None is specified, then this function defaults to an", "df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)])", "df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\"", "df[index_col] = str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data)", "= _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs =", "<reponame>austinorr/swmmnetwork<filename>swmmnetwork/convert.py<gh_stars>1-10 import pandas import networkx as nx import hymo from .util import _upper_case_column,", "= _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda", "node_dfs = [] for xtype in node_types: df = getattr(inp, xtype, None) if", "if index_col is not None: df[index_col] = str(n) df['from'] = str(n) df['to'] =", "hymo from .util import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES =", "'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges", "---------- inp : string or hymo.SwmmInputFile this file will be read to pull", "or hymo.SWMMInpFile \"\"\" inp = 
_validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node',", "subcatchment positions. Polygons are converted to coordinate pairs through their centroid. Returns -------", "if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s:", "str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls = []", "then this function defaults to an nx.MultiDiGraph() instance Returns ------- Graph Reference ---------", "row.to_dict() edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\"", "file Parameters ---------- inp : file_path or hymo.SWMMInpFile create_using : nx.Graph-like object, optional", "= nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, )", "cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] !=", "edges.iterrows(): _to, _from = index data = row.to_dict() edge_list.append([_to, _from, data]) return edge_list", "= str(_to) df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df", "!= 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str)", "df is not None: df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1]", "= ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:,", "( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) 
.reset_index(drop=True) ) if index_col is not None: df =", "s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) )", ".assign(id=lambda df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node',", "Returns ------- dict suitable for use as the `pos` kwarg of networkx drawing", "Reference --------- This function is meant to be similar to the nx.from_pandas_edgelist() \"\"\"", "ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls = [] for edge in G.edges(data=True): df", "(default=None) the type of graph to make. If None is specified, then this", "import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES", "to be similar to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using is", "if df is not None: df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'})", "xtype in edge_types: df = getattr(inp, xtype, None) if df is not None:", "edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges", "xtype in node_types: df = getattr(inp, xtype, None) if df is not None:", "in edges.iterrows(): _to, _from = index data = row.to_dict() edge_list.append([_to, _from, data]) return", "df['from'] = str(_from) df['to'] = str(_to) df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls)", ") edge_types = SWMM_LINK_TYPES edge_dfs = [] for xtype in edge_types: df =", "If None is specified, then this function defaults to an nx.MultiDiGraph() instance Returns", "\"\"\"Create new nx.Graph-like object from a SWMM5.1 inp file Parameters ---------- inp :", "pandas import networkx as nx import hymo from .util import _upper_case_column, 
_validate_hymo_inp from", ": file_path or hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None) the type of", "_validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets',", "inp : file_path or hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None) the type", "getattr(inp, xtype, None) if df is not None: df = ( df .rename(columns={'From_Node':", "is not None: df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node',", "source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new", "_validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords", "= df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\"", ".reset_index(drop=True) ) if index_col is not None: df = df.set_index(index_col) df.index = df.index.map(str)", ".rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types =", "set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from a SWMM5.1 inp", "node_types: df = getattr(inp, xtype, None) if df is not None: df =", "and nodes from a SWMM 5.1 input file. 
Parameters ---------- G : nx.Graph-like", "include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] != 's' else", "source='source', target='target', cols=None): edges = df.set_index([source, target]) if cols is not None: if", ": nx.Graph-like object inp : file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list", "add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes from a SWMM 5.1 input file.", "type of graph to make. If None is specified, then this function defaults", "from inp file to networkx drawing `pos` format, i.e., a dict of node", "5.1 input file. Parameters ---------- G : nx.Graph-like object inp : file_path or", "_validate_hymo_inp(inp) if create_using is None: create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G =", "cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt')", "str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G):", "This function is meant to be similar to the nx.from_pandas_edgelist() \"\"\" inp =", "nx.Graph-like object inp : file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list =", "= inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean())", "---------- inp : file_path or hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None) the", "= str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df)", "edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\"", "pandas.DataFrame(ls) def network_to_df(G, 
index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if", "'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES", "df['to'] = str(_to) df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None):", "return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp)", "nx.Graph-like object, optional (default=None) the type of graph to make. If None is", "values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates and subcatchment", "for index, row in edges.iterrows(): _to, _from = index data = row.to_dict() edge_list.append([_to,", "= {} _from, _to, data = edge df['from'] = str(_from) df['to'] = str(_to)", "'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def", "cols = [cols] edges = edges.loc[:, cols] edge_list = [] for index, row", "target]) if cols is not None: if isinstance(cols, str): cols = [cols] edges", "networkx as nx import hymo from .util import _upper_case_column, _validate_hymo_inp from .compat import", "df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype',", "= {} n, data = node if index_col is not None: df[index_col] =", "this file will be read to pull the node coordinates and subcatchment positions.", "inp = _validate_hymo_inp(inp) if create_using is None: create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp)", "+ s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower())", "nx.Graph-like object from a SWMM5.1 inp 
file Parameters ---------- inp : file_path or", "for xtype in node_types: df = getattr(inp, xtype, None) if df is not", "= [cols] edges = edges.loc[:, cols] edge_list = [] for index, row in", "edge_types: df = getattr(inp, xtype, None) if df is not None: df =", "'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls = [] for node in G.nodes(data=True):", ".assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) )", "None: df = df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index() return df def", "df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm", ".assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node',", "= [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls =", "index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col is not", "pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source,", "\"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = [] for xtype in", "string or hymo.SwmmInputFile this file will be read to pull the node coordinates", "\"\"\" inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list)", "dict suitable for use as the `pos` kwarg of networkx drawing methods. 
\"\"\"", "include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower())", "node names with x, y coordinates as values. Parameters ---------- inp : string", "file will be read to pull the node coordinates and subcatchment positions. Polygons", "None: df[index_col] = str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type'] = 'node'", ".loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links]", "= pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs)", "file_path or hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None) the type of graph", "new nx.Graph-like object from a SWMM5.1 inp file Parameters ---------- inp : file_path", "[ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls = []", "edges.loc[:, cols] edge_list = [] for index, row in edges.iterrows(): _to, _from =", "import hymo from .util import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES", "df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls = [] for", "hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None) the type of graph to make.", "import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices',", "input file. 
Parameters ---------- G : nx.Graph-like object inp : file_path or hymo.SWMMInpFile", "df = {} _from, _to, data = edge df['from'] = str(_from) df['to'] =", "xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower())", "edges = edges.loc[:, cols] edge_list = [] for index, row in edges.iterrows(): _to,", "df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs =", "Parameters ---------- inp : file_path or hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None)", "edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types", "in G.edges(data=True): df = {} _from, _to, data = edge df['from'] = str(_from)", "[ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls',", "df: df.index) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype',", "df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links = (", "networkx drawing `pos` format, i.e., a dict of node names with x, y", "edge df['from'] = str(_from) df['to'] = str(_to) df['type'] = 'link' df.update(data) ls.append(df) return", "is None: create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node',", "to an nx.MultiDiGraph() instance Returns ------- Graph Reference --------- This function is meant", "edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def 
swmm_inp_layout_to_pos(inp): \"\"\"Reads", "= str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls =", "`pos` format, i.e., a dict of node names with x, y coordinates as", "= 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls = [] for edge", "will be read to pull the node coordinates and subcatchment positions. Polygons are", "include_index=True) pos = ( coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list')", "file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list,", "'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs = []", "ls = [] for edge in G.edges(data=True): df = {} _from, _to, data", "swmm node coordinates and subcatchment from inp file to networkx drawing `pos` format,", "from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps',", "= ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col is not None: df", ".rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges", "[cols] edges = edges.loc[:, cols] edge_list = [] for index, row in edges.iterrows():", "edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp,", "inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index)", "= SWMM_LINK_TYPES 
edge_dfs = [] for xtype in edge_types: df = getattr(inp, xtype,", "edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None):", "'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str)", "for edge in G.edges(data=True): df = {} _from, _to, data = edge df['from']", "df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df = (", "or hymo.SWMMInpFile create_using : nx.Graph-like object, optional (default=None) the type of graph to", "str): cols = [cols] edges = edges.loc[:, cols] edge_list = [] for index,", "= pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges =", "= pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node", "in G.nodes(data=True): df = {} n, data = node if index_col is not", ".loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs", "meant to be similar to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using", "in node_types: df = getattr(inp, xtype, None) if df is not None: df", "None) if df is not None: df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype", "if index_col is not None: df = df.set_index(index_col) df.index = df.index.map(str) df =", "include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos = ( coords .astype(float) .append( polys .astype(float)", "coordinates and subcatchment from inp file to networkx drawing `pos` format, i.e., a", "pandas_edgelist_from_swmm_inp(inp): \"\"\" 
\"\"\" inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True)", "return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes from a SWMM", "( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['xtype']]", "s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs = [] for xtype in edge_types:", "_validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df:", "def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source, target]) if cols is not", "Returns ------- Graph Reference --------- This function is meant to be similar to", "df['to'] = str(sorted(G.successors(n))) df['type'] = 'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls", "None) if df is not None: df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node':", "for node in G.nodes(data=True): df = {} n, data = node if index_col", "and subcatchment positions. Polygons are converted to coordinate pairs through their centroid. 
Returns", "edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source, target]) if", "function defaults to an nx.MultiDiGraph() instance Returns ------- Graph Reference --------- This function", "format, i.e., a dict of node names with x, y coordinates as values.", "'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col),", "= row.to_dict() edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp):", "pull the node coordinates and subcatchment positions. Polygons are converted to coordinate pairs", "Polygons are converted to coordinate pairs through their centroid. Returns ------- dict suitable", "as nx import hymo from .util import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist,", "[] for index, row in edges.iterrows(): _to, _from = index data = row.to_dict()", "ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G, index_col), edges_to_df(G)]) .reset_index(drop=True)", "if df is not None: df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if", "dict of node names with x, y coordinates as values. 
Parameters ---------- inp", ".pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1]", "similar to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using is None: create_using", "def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet',", "is not None: df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] !=", "optional (default=None) the type of graph to make. If None is specified, then", "G.edges(data=True): df = {} _from, _to, data = edge df['from'] = str(_from) df['to']", "inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = [] for xtype in node_types:", "create_using : nx.Graph-like object, optional (default=None) the type of graph to make. If", "isinstance(cols, str): cols = [cols] edges = edges.loc[:, cols] edge_list = [] for", "------- dict suitable for use as the `pos` kwarg of networkx drawing methods.", "object inp : file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp)", "index_col is not None: df = df.set_index(index_col) df.index = df.index.map(str) df = df.sort_index()", "include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet':", "'^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s:", "nx import hymo from .util import _upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes", "make. 
If None is specified, then this function defaults to an nx.MultiDiGraph() instance", "from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits', 'outlets', 'pumps', ] SWMM_NODE_TYPES =", "'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index)", "'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype", "\"\"\" inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df:", "G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object", "_upper_case_column, _validate_hymo_inp from .compat import from_pandas_edgelist, set_node_attributes SWMM_LINK_TYPES = [ 'weirs', 'orifices', 'conduits',", "_from, _to, data = edge df['from'] = str(_from) df['to'] = str(_to) df['type'] =", "is meant to be similar to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if", "coordinate pairs through their centroid. 
Returns ------- dict suitable for use as the", "= [] for node in G.nodes(data=True): df = {} n, data = node", "node coordinates and subcatchment from inp file to networkx drawing `pos` format, i.e.,", ".assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types", "inp file Parameters ---------- inp : file_path or hymo.SWMMInpFile create_using : nx.Graph-like object,", "str(_to) df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df =", "not None: df = ( df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'],", "to the nx.from_pandas_edgelist() \"\"\" inp = _validate_hymo_inp(inp) if create_using is None: create_using =", ".astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k): list(map(float, v))", "catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda", "data = node if index_col is not None: df[index_col] = str(n) df['from'] =", "SWMM_LINK_TYPES edge_dfs = [] for xtype in edge_types: df = getattr(inp, xtype, None)", "_validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index')", "\"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda", "( coords .astype(float) .append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k):", "if create_using is None: create_using = 
nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list,", "str(_from) df['to'] = str(_to) df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G,", "is not None: if isinstance(cols, str): cols = [cols] edges = edges.loc[:, cols]", "df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'}) .loc[:,", "specified, then this function defaults to an nx.MultiDiGraph() instance Returns ------- Graph Reference", "= 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def network_to_df(G, index_col=None): df = ( pandas.concat([nodes_to_df(G,", "inp = _validate_hymo_inp(inp) df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) edge_list = pandas_edgelist_to_edgelist(df_edge_list, source='inlet_node', target='outlet_node') G.add_edges_from(edge_list) df_node_attrs", "] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None):", "for use as the `pos` kwarg of networkx drawing methods. \"\"\" inp =", "in edge_types: df = getattr(inp, xtype, None) if df is not None: df", "None: create_using = nx.MultiDiGraph() df_edge_list = pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True,", ".astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k): list(map(float, v)) for k, v", "their centroid. 
Returns ------- dict suitable for use as the `pos` kwarg of", "index data = row.to_dict() edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items())", "= str(_from) df['to'] = str(_to) df['type'] = 'link' df.update(data) ls.append(df) return pandas.DataFrame(ls) def", "= df.set_index([source, target]) if cols is not None: if isinstance(cols, str): cols =", "['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs =", "G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates and subcatchment from inp", "edges = df.set_index([source, target]) if cols is not None: if isinstance(cols, str): cols", "SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls", "defaults to an nx.MultiDiGraph() instance Returns ------- Graph Reference --------- This function is", "edge_types = SWMM_LINK_TYPES edge_dfs = [] for xtype in edge_types: df = getattr(inp,", "'node' df.update(data) ls.append(df) return pandas.DataFrame(ls) def edges_to_df(G): ls = [] for edge in", "ls = [] for node in G.nodes(data=True): df = {} n, data =", "to make. If None is specified, then this function defaults to an nx.MultiDiGraph()", "s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and", "inp): \"\"\"Add the edges and nodes from a SWMM 5.1 input file. 
Parameters", "this function defaults to an nx.MultiDiGraph() instance Returns ------- Graph Reference --------- This", "nx.MultiDiGraph() instance Returns ------- Graph Reference --------- This function is meant to be", "df .rename(columns={'From_Node': 'Inlet_Node', 'To_Node': 'Outlet_Node'}) .pipe(_upper_case_column, cols=['Inlet_Node', 'Outlet_Node'], include_index=True) .loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda", "data = row.to_dict() edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def", "create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and", "the `pos` kwarg of networkx drawing methods. \"\"\" inp = _validate_hymo_inp(inp) coords =", "= df.sort_index() return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links =", "= pandas_edgelist_from_swmm_inp(inp=inp) G = from_pandas_edgelist(df_edge_list, source='inlet_node', target='outlet_node', edge_attr=True, create_using=create_using, ) df_node_attrs = pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index')", "---------- G : nx.Graph-like object inp : file_path or hymo.SWMMInpFile \"\"\" inp =", "is not None: df[index_col] = str(n) df['from'] = str(n) df['to'] = str(sorted(G.successors(n))) df['type']", "suitable for use as the `pos` kwarg of networkx drawing methods. \"\"\" inp", "or hymo.SwmmInputFile this file will be read to pull the node coordinates and", ".append( polys .astype(float) .groupby(polys.index) .mean()) .T .to_dict('list') ) return {str(k): list(map(float, v)) for", "file. 
Parameters ---------- G : nx.Graph-like object inp : file_path or hymo.SWMMInpFile \"\"\"", "= edges.loc[:, cols] edge_list = [] for index, row in edges.iterrows(): _to, _from", "getattr(inp, xtype, None) if df is not None: df = ( df .pipe(_upper_case_column,", "node if index_col is not None: df[index_col] = str(n) df['from'] = str(n) df['to']", "as the `pos` kwarg of networkx drawing methods. \"\"\" inp = _validate_hymo_inp(inp) coords", "= ( inp.subcatchments .pipe(_upper_case_column, cols='Outlet', include_index=True) .assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s:", "object, optional (default=None) the type of graph to make. If None is specified,", "pandas_node_attrs_from_swmm_inp(inp=inp).to_dict('index') set_node_attributes(G, values=df_node_attrs) def from_swmm_inp(inp, create_using=None): \"\"\"Create new nx.Graph-like object from a SWMM5.1", "list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs =", "Parameters ---------- G : nx.Graph-like object inp : file_path or hymo.SWMMInpFile \"\"\" inp", "else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda s: s.lower()) ) edge_dfs.append(df) edges", "def add_edges_from_swmm_inp(G, inp): \"\"\"Add the edges and nodes from a SWMM 5.1 input", "def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) node_types = SWMM_NODE_TYPES node_dfs = []", "edge_dfs = [] for xtype in edge_types: df = getattr(inp, xtype, None) if", "the type of graph to make. 
If None is specified, then this function", "data]) return edge_list def pandas_nodelist_to_nodelist(df): return list(df.to_dict('index').items()) def pandas_node_attrs_from_swmm_inp(inp): \"\"\" \"\"\" inp =", "SWMM_NODE_TYPES node_dfs = [] for xtype in node_types: df = getattr(inp, xtype, None)", "edge in G.edges(data=True): df = {} _from, _to, data = edge df['from'] =", "'id']] .rename(columns=lambda s: s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs = [] for xtype", "pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source, target]) if cols is not None:", "s.lower()) ) edge_types = SWMM_LINK_TYPES edge_dfs = [] for xtype in edge_types: df", "drawing methods. \"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column,", "index_col), edges_to_df(G)]) .reset_index(drop=True) ) if index_col is not None: df = df.set_index(index_col) df.index", "xtype[:-1]) .loc[:, ['xtype']] .rename(columns=lambda s: s.lower()) ) node_dfs.append(df) return pandas.concat(node_dfs).astype(str) def add_edges_from_swmm_inp(G, inp):", "instance Returns ------- Graph Reference --------- This function is meant to be similar", "'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls = [] for node in", "a SWMM5.1 inp file Parameters ---------- inp : file_path or hymo.SWMMInpFile create_using :", "positions. Polygons are converted to coordinate pairs through their centroid. Returns ------- dict", ".loc[:, ['Inlet_Node', 'Outlet_Node']] .assign(id=lambda df: df.index) .assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1])", "return G def swmm_inp_layout_to_pos(inp): \"\"\"Reads and converts swmm node coordinates and subcatchment from", "return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source, target]) if cols", "SWMM 5.1 input file. 
Parameters ---------- G : nx.Graph-like object inp : file_path", "_from = index data = row.to_dict() edge_list.append([_to, _from, data]) return edge_list def pandas_nodelist_to_nodelist(df):", "G : nx.Graph-like object inp : file_path or hymo.SWMMInpFile \"\"\" inp = _validate_hymo_inp(inp)", ") edge_dfs.append(df) edges = pandas.concat([catchment_links] + edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target',", "'pumps', ] SWMM_NODE_TYPES = [ 'subcatchments', 'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G,", "+ edge_dfs).astype(str) return edges def pandas_edgelist_to_edgelist(df, source='source', target='target', cols=None): edges = df.set_index([source, target])", "return df def pandas_edgelist_from_swmm_inp(inp): \"\"\" \"\"\" inp = _validate_hymo_inp(inp) catchment_links = ( inp.subcatchments", "[] for edge in G.edges(data=True): df = {} _from, _to, data = edge", "use as the `pos` kwarg of networkx drawing methods. \"\"\" inp = _validate_hymo_inp(inp)", "not None: df = ( df .pipe(_upper_case_column, include_index=True) .assign(xtype=xtype if xtype[-1] != 's'", "converts swmm node coordinates and subcatchment from inp file to networkx drawing `pos`", "values. Parameters ---------- inp : string or hymo.SwmmInputFile this file will be read", "converted to coordinate pairs through their centroid. Returns ------- dict suitable for use", "as values. Parameters ---------- inp : string or hymo.SwmmInputFile this file will be", ".assign(Inlet_Node=lambda df: df.index) .assign(id=lambda df: df.index.map(lambda s: '^' + s)) .assign(xtype='dt') .rename(columns={'Outlet': 'Outlet_Node'})", "None: if isinstance(cols, str): cols = [cols] edges = edges.loc[:, cols] edge_list =", "methods. 
\"\"\" inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True)", "inp = _validate_hymo_inp(inp) coords = inp.coordinates.pipe(_upper_case_column, include_index=True) polys = inp.polygons.pipe(_upper_case_column, include_index=True) pos =", "{} n, data = node if index_col is not None: df[index_col] = str(n)", ".assign(xtype=xtype if xtype[-1] != 's' else xtype[:-1]) .loc[:, ['Inlet_Node', 'Outlet_Node', 'xtype', 'id']] .rename(columns=lambda", "'junctions', 'outfalls', 'dividers', 'storage', ] def nodes_to_df(G, index_col=None): ls = [] for node" ]
[ "with open(filename, 'rb') as rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int, to_index:", "in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int) -> int: return self.bytes[index]", "import RomVariant from tlh import settings class Rom: def __init__(self, filename: str): with", "-> bytearray: # TODO apply constraints here? Or one level above in the", "'rb') as rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int) ->", "-> int: return len(self.bytes) def get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little')", "data is read only, so we only need to read it once roms:", "get_byte(self, index: int) -> int: return self.bytes[index] def length(self) -> int: return len(self.bytes)", "only need to read it once roms: dict[RomVariant, Rom] = {} # TODO", "'little') # Rom data is read only, so we only need to read", "get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read", "return roms[variant] def invalidate_rom(variant: RomVariant) -> None: global roms if variant in roms:", "index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read only,", "length(self) -> int: return len(self.bytes) def get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4],", "int: return len(self.bytes) def get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') #", "index: int) -> int: return self.bytes[index] def length(self) -> int: return len(self.bytes) def", "import settings class Rom: def __init__(self, filename: str): with open(filename, 'rb') as rom:", "TODO apply constraints here? Or one level above in the HexEditorInstance? 
return self.bytes[from_index:to_index]", "= bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int) -> bytearray: # TODO apply", "def get_bytes(self, from_index: int, to_index: int) -> bytearray: # TODO apply constraints here?", "def length(self) -> int: return len(self.bytes) def get_pointer(self, index: int) -> int: return", "be no changes def get_rom(variant: RomVariant) -> Optional[Rom]: global roms if variant not", "need to read it once roms: dict[RomVariant, Rom] = {} # TODO invalidate", "from typing import Optional from tlh.const import RomVariant from tlh import settings class", "TODO invalidate roms when settings change? # necessary? Once we have a valid", "Rom: def __init__(self, filename: str): with open(filename, 'rb') as rom: self.bytes = bytearray(rom.read())", "only, so we only need to read it once roms: dict[RomVariant, Rom] =", "# TODO apply constraints here? Or one level above in the HexEditorInstance? return", "return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read only, so we only need", "int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read only, so", "not in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return None return roms[variant] def", "class Rom: def __init__(self, filename: str): with open(filename, 'rb') as rom: self.bytes =", "rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int) -> bytearray: #", "self.bytes[index] def length(self) -> int: return len(self.bytes) def get_pointer(self, index: int) -> int:", "__init__(self, filename: str): with open(filename, 'rb') as rom: self.bytes = bytearray(rom.read()) def get_bytes(self,", "str): with open(filename, 'rb') as rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int,", "-> Optional[Rom]: global roms if variant not in roms: try: roms[variant] = Rom(settings.get_rom(variant))", "# Rom data is read only, so we only need to read it", 
"we have a valid rom, there will be no changes def get_rom(variant: RomVariant)", "int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read only, so we only need to", "int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read only, so we only", "len(self.bytes) def get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data", "a valid rom, there will be no changes def get_rom(variant: RomVariant) -> Optional[Rom]:", "level above in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int) -> int:", "Optional from tlh.const import RomVariant from tlh import settings class Rom: def __init__(self,", "from tlh.const import RomVariant from tlh import settings class Rom: def __init__(self, filename:", "def get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is", "to read it once roms: dict[RomVariant, Rom] = {} # TODO invalidate roms", "the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int) -> int: return self.bytes[index] def", "valid rom, there will be no changes def get_rom(variant: RomVariant) -> Optional[Rom]: global", "return None return roms[variant] def invalidate_rom(variant: RomVariant) -> None: global roms if variant", "rom, there will be no changes def get_rom(variant: RomVariant) -> Optional[Rom]: global roms", "in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return None return roms[variant] def invalidate_rom(variant:", "return self.bytes[index] def length(self) -> int: return len(self.bytes) def get_pointer(self, index: int) ->", "-> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom data is read only, so we", "{} # TODO invalidate roms when settings change? # necessary? 
Once we have", "try: roms[variant] = Rom(settings.get_rom(variant)) except: return None return roms[variant] def invalidate_rom(variant: RomVariant) ->", "as rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int) -> bytearray:", "def invalidate_rom(variant: RomVariant) -> None: global roms if variant in roms: del roms[variant]", "typing import Optional from tlh.const import RomVariant from tlh import settings class Rom:", "self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int) -> bytearray: # TODO", "tlh import settings class Rom: def __init__(self, filename: str): with open(filename, 'rb') as", "None return roms[variant] def invalidate_rom(variant: RomVariant) -> None: global roms if variant in", "get_bytes(self, from_index: int, to_index: int) -> bytearray: # TODO apply constraints here? Or", "read it once roms: dict[RomVariant, Rom] = {} # TODO invalidate roms when", "dict[RomVariant, Rom] = {} # TODO invalidate roms when settings change? # necessary?", "return len(self.bytes) def get_pointer(self, index: int) -> int: return int.from_bytes(self.bytes[index:index+4], 'little') # Rom", "invalidate roms when settings change? # necessary? Once we have a valid rom,", "will be no changes def get_rom(variant: RomVariant) -> Optional[Rom]: global roms if variant", "Rom data is read only, so we only need to read it once", "roms[variant] def invalidate_rom(variant: RomVariant) -> None: global roms if variant in roms: del", "apply constraints here? Or one level above in the HexEditorInstance? return self.bytes[from_index:to_index] def", "int, to_index: int) -> bytearray: # TODO apply constraints here? Or one level", "change? # necessary? Once we have a valid rom, there will be no", "if variant not in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return None return", "to_index: int) -> bytearray: # TODO apply constraints here? 
Or one level above", "we only need to read it once roms: dict[RomVariant, Rom] = {} #", "Rom(settings.get_rom(variant)) except: return None return roms[variant] def invalidate_rom(variant: RomVariant) -> None: global roms", "def get_rom(variant: RomVariant) -> Optional[Rom]: global roms if variant not in roms: try:", "necessary? Once we have a valid rom, there will be no changes def", "self.bytes[from_index:to_index] def get_byte(self, index: int) -> int: return self.bytes[index] def length(self) -> int:", "return self.bytes[from_index:to_index] def get_byte(self, index: int) -> int: return self.bytes[index] def length(self) ->", "Rom] = {} # TODO invalidate roms when settings change? # necessary? Once", "roms[variant] = Rom(settings.get_rom(variant)) except: return None return roms[variant] def invalidate_rom(variant: RomVariant) -> None:", "def __init__(self, filename: str): with open(filename, 'rb') as rom: self.bytes = bytearray(rom.read()) def", "Or one level above in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int)", "variant not in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return None return roms[variant]", "from tlh import settings class Rom: def __init__(self, filename: str): with open(filename, 'rb')", "settings change? # necessary? Once we have a valid rom, there will be", "Once we have a valid rom, there will be no changes def get_rom(variant:", "= {} # TODO invalidate roms when settings change? # necessary? Once we", "except: return None return roms[variant] def invalidate_rom(variant: RomVariant) -> None: global roms if", "HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int) -> int: return self.bytes[index] def length(self)", "open(filename, 'rb') as rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int)", "# TODO invalidate roms when settings change? # necessary? 
Once we have a", "changes def get_rom(variant: RomVariant) -> Optional[Rom]: global roms if variant not in roms:", "one level above in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int) ->", "global roms if variant not in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return", "-> int: return self.bytes[index] def length(self) -> int: return len(self.bytes) def get_pointer(self, index:", "is read only, so we only need to read it once roms: dict[RomVariant,", "above in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index: int) -> int: return", "there will be no changes def get_rom(variant: RomVariant) -> Optional[Rom]: global roms if", "have a valid rom, there will be no changes def get_rom(variant: RomVariant) ->", "roms: dict[RomVariant, Rom] = {} # TODO invalidate roms when settings change? #", "so we only need to read it once roms: dict[RomVariant, Rom] = {}", "Optional[Rom]: global roms if variant not in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except:", "int: return self.bytes[index] def length(self) -> int: return len(self.bytes) def get_pointer(self, index: int)", "bytearray: # TODO apply constraints here? Or one level above in the HexEditorInstance?", "once roms: dict[RomVariant, Rom] = {} # TODO invalidate roms when settings change?", "# necessary? Once we have a valid rom, there will be no changes", "get_rom(variant: RomVariant) -> Optional[Rom]: global roms if variant not in roms: try: roms[variant]", "here? Or one level above in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self, index:", "RomVariant) -> Optional[Rom]: global roms if variant not in roms: try: roms[variant] =", "when settings change? # necessary? 
Once we have a valid rom, there will", "import Optional from tlh.const import RomVariant from tlh import settings class Rom: def", "roms if variant not in roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return None", "filename: str): with open(filename, 'rb') as rom: self.bytes = bytearray(rom.read()) def get_bytes(self, from_index:", "def get_byte(self, index: int) -> int: return self.bytes[index] def length(self) -> int: return", "int) -> int: return self.bytes[index] def length(self) -> int: return len(self.bytes) def get_pointer(self,", "no changes def get_rom(variant: RomVariant) -> Optional[Rom]: global roms if variant not in", "constraints here? Or one level above in the HexEditorInstance? return self.bytes[from_index:to_index] def get_byte(self,", "roms: try: roms[variant] = Rom(settings.get_rom(variant)) except: return None return roms[variant] def invalidate_rom(variant: RomVariant)", "roms when settings change? # necessary? Once we have a valid rom, there", "read only, so we only need to read it once roms: dict[RomVariant, Rom]", "settings class Rom: def __init__(self, filename: str): with open(filename, 'rb') as rom: self.bytes", "it once roms: dict[RomVariant, Rom] = {} # TODO invalidate roms when settings", "RomVariant from tlh import settings class Rom: def __init__(self, filename: str): with open(filename,", "bytearray(rom.read()) def get_bytes(self, from_index: int, to_index: int) -> bytearray: # TODO apply constraints", "from_index: int, to_index: int) -> bytearray: # TODO apply constraints here? Or one", "int) -> bytearray: # TODO apply constraints here? Or one level above in", "tlh.const import RomVariant from tlh import settings class Rom: def __init__(self, filename: str):", "= Rom(settings.get_rom(variant)) except: return None return roms[variant] def invalidate_rom(variant: RomVariant) -> None: global" ]
[ "or else it throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes,", "activity in Houses that I am a member of.', required=False) promo_contact = forms.BooleanField(label='Yes,", "about events, new features, and other promotional information.', required=False) class VerificationForm(forms.Form): phone_number =", "'password1', 'password2', 'captcha'] def signup(self, request, user): \"\"\" Required, or else it throws", "I am a member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would like RoomScout", "of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me about", "['email', 'password1', 'password2', 'captcha'] def signup(self, request, user): \"\"\" Required, or else it", "from django import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email',", "pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me", "me about events, new features, and other promotional information.', required=False) class VerificationForm(forms.Form): phone_number", "ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2', 'captcha'] def signup(self, request, user): \"\"\"", "contact me about activity in Houses that I am a member of.', required=False)", "label='') field_order = ['email', 'password1', 'password2', 'captcha'] def signup(self, request, user): \"\"\" Required,", "\"\"\" pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact", "about activity in Houses that I am a member of.', required=False) promo_contact =", "signup(self, request, user): \"\"\" Required, or else it throws deprecation warnings \"\"\" pass", "it throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): 
general_contact = forms.BooleanField(label='Yes, I would", "me about activity in Houses that I am a member of.', required=False) promo_contact", "required=False) promo_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me about events,", "that I am a member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would like", "like RoomScout to contact me about activity in Houses that I am a", "I would like RoomScout to contact me about events, new features, and other", "= ['email', 'password1', 'password2', 'captcha'] def signup(self, request, user): \"\"\" Required, or else", "'password2', 'captcha'] def signup(self, request, user): \"\"\" Required, or else it throws deprecation", "ReCaptchaField from captcha.widgets import ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form): captcha =", "events, new features, and other promotional information.', required=False) class VerificationForm(forms.Form): phone_number = forms.CharField(max_length=20,", "field_order = ['email', 'password1', 'password2', 'captcha'] def signup(self, request, user): \"\"\" Required, or", "RoomScout to contact me about activity in Houses that I am a member", "would like RoomScout to contact me about activity in Houses that I am", "captcha.fields import ReCaptchaField from captcha.widgets import ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form):", "= forms.BooleanField(label='Yes, I would like RoomScout to contact me about events, new features,", "django import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1',", "a member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact", "features, and other promotional information.', required=False) class VerificationForm(forms.Form): phone_number = forms.CharField(max_length=20, label='Phone Number',", 
"<filename>accounts/forms.py<gh_stars>10-100 from captcha.fields import ReCaptchaField from captcha.widgets import ReCaptchaV3 from django import forms", "AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2', 'captcha'] def signup(self,", "member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me", "new features, and other promotional information.', required=False) class VerificationForm(forms.Form): phone_number = forms.CharField(max_length=20, label='Phone", "contact me about events, new features, and other promotional information.', required=False) class VerificationForm(forms.Form):", "class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2', 'captcha'] def", "= ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2', 'captcha'] def signup(self, request, user):", "general_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me about activity in", "and other promotional information.', required=False) class VerificationForm(forms.Form): phone_number = forms.CharField(max_length=20, label='Phone Number', required=False)", "Required, or else it throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact =", "captcha.widgets import ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='')", "user): \"\"\" Required, or else it throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form):", "captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2', 'captcha'] def signup(self, request,", "would like RoomScout to contact me about events, new features, and other promotional", "to contact me about activity in Houses that I am a member 
of.',", "= forms.BooleanField(label='Yes, I would like RoomScout to contact me about activity in Houses", "throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like", "am a member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would like RoomScout to", "class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me about", "forms.BooleanField(label='Yes, I would like RoomScout to contact me about activity in Houses that", "I would like RoomScout to contact me about activity in Houses that I", "PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me about activity", "Houses that I am a member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I would", "else it throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I", "'captcha'] def signup(self, request, user): \"\"\" Required, or else it throws deprecation warnings", "from captcha.widgets import ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3,", "in Houses that I am a member of.', required=False) promo_contact = forms.BooleanField(label='Yes, I", "deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like RoomScout", "request, user): \"\"\" Required, or else it throws deprecation warnings \"\"\" pass class", "import ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order", "like RoomScout to contact me about events, new features, and other promotional information.',", "from captcha.fields import ReCaptchaField from captcha.widgets import ReCaptchaV3 from django import forms class", "import 
ReCaptchaField from captcha.widgets import ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form): captcha", "ReCaptchaV3 from django import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order =", "warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact = forms.BooleanField(label='Yes, I would like RoomScout to", "promo_contact = forms.BooleanField(label='Yes, I would like RoomScout to contact me about events, new", "def signup(self, request, user): \"\"\" Required, or else it throws deprecation warnings \"\"\"", "to contact me about events, new features, and other promotional information.', required=False) class", "\"\"\" Required, or else it throws deprecation warnings \"\"\" pass class PreferencesForm(forms.Form): general_contact", "forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2', 'captcha']", "import forms class AllauthSignupForm(forms.Form): captcha = ReCaptchaField(widget=ReCaptchaV3, label='') field_order = ['email', 'password1', 'password2',", "RoomScout to contact me about events, new features, and other promotional information.', required=False)", "forms.BooleanField(label='Yes, I would like RoomScout to contact me about events, new features, and" ]
[ "t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number", "fail2ban_whitelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search", "= request.POST.get('status', \"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return", "t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request):", "success = len(password_list) - fail for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 :", "get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import", "1 paginator = Paginator(lists, length) try: lists = paginator.page(page) except (EmptyPage, InvalidPage): lists", "', '').replace('\\t', '') if not password: continue password_list.append( password ) if form.file_ext ==", "form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" :", "form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password:", "status = request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功'))", "form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过", "def clear_fail2ban_cache(): redis = 
get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ###############################", "length * (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html',", "= TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number +=", "render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir", "0 page = 1 count = len(lists) if start_num >= count: page =", "= len(password_list) - fail for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s')", "int(order_column) < len(colums): if order_dir == 'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else:", "= SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam'))", "utf-8 -*- from __future__ import unicode_literals import re import copy # import os", "start_num = int(data.get('start', '0')) page = start_num / length + 1 except ValueError:", "+ 1 except ValueError: start_num = 0 page = 1 count = len(lists)", "count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number = length * (page-1) + 1", "lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if lists.exists() and order_column", "start_num = 0 page = 1 count = len(lists) if start_num >= count:", "d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\")", "\"delete\": 
Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def", "status = request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功'))", "'').replace('\\t', '') if not password: continue password_list.append( password ) fail_list = form.save_password_list(password_list) fail", "obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method", "spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == \"POST\": form", "= lists.count() if start_num >= count: page = 1 paginator = Paginator(lists, length)", "request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist'))", "Q from django_redis import get_redis_connection from django.utils.translation import ugettext_lazy as _ from app.core.models", "domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数'))", "if request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request,", "{\"fail\": line}) 
messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return HttpResponseRedirect(reverse('password_weaklist'))", "lists.count() if start_num >= count: page = 1 paginator = Paginator(lists, length) try:", "not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method ==", "HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method ==", "import TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models import Q from", ": domain, }) @licence_required def password_weaklist(request): if request.method == \"POST\": id = request.POST.get('id',", "TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1", "form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method == \"POST\":", "= Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set,", "'').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password ) if", "{'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs),", "form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) 
@licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain =", "get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form", "lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) lists =", "form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return", "security_frequency(request): domain_id = get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set", "get_session_domain(domain_id) if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail", "try: lists = paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\":", "fail2ban_block_add(request): form = BanBlockListForm() if request.method == \"POST\": form = BanBlockListForm(request.POST) if form.is_valid():", "obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method == \"POST\": form = BanBlockListForm(request.POST,", "BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request,", "data.get('search[value]', '') colums = ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all()", "clear_redis_cache from .forms import BanRuleForm, 
BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from", "form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request,", "\"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" : domain, }) @licence_required def password_weaklist(request): if", "for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render()", "get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if", "search = data.get('search[value]', '') colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time',", "'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required", "form = Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid():", "if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required", "Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList", "order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'password'] if", "= request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": 
Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache()", "return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method == \"POST\":", "0 page = 1 count = lists.count() if start_num >= count: page =", "\"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data =", "'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search: lists = lists.filter(", "start_num = 0 page = 1 count = lists.count() if start_num >= count:", "from lib.licence import licence_required from lib.tools import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm,", "HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form =", "############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method == \"POST\": id = request.POST.get('id',", "Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request):", "def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = 
Fail2BanTrustForm(instance=obj) if request.method == \"POST\":", "Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request):", "ValueError: length = 1 try: start_num = int(data.get('start', '0')) page = start_num /", "in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not", ">= count: page = 1 paginator = Paginator(lists, length) try: lists = paginator.page(page)", "== 'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)])", "| Q(proto__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums): if order_dir", "\"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request): domain_id = get_domainid_bysession(request)", "rs = {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>'", "\"domain\" : domain, }) @licence_required def password_weaklist(request): if request.method == \"POST\": id =", "', '').replace('\\t', '') if not password: continue password_list.append( password ) fail_list = form.save_password_list(password_list)", "= data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'ip', 'name', 'disabled',]", "if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request,", "= BanBlockListForm(request.POST) if form.is_valid(): 
form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request,", "Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj) if", "ugettext_lazy as _ from app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession,", "+= 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm() if request.method", "if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id)", "fail2ban_rulelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\")", "= request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return", "content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line in", "== \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status ==", "request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功'))", "continue password_list.append( password ) fail_list = form.save_password_list(password_list) fail = len(fail_list) success = len(password_list)", "def password_weaklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status',", "1 for d in lists.object_list: t = 
TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number})", "'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search) |", "redis = get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表", "form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail = 0, 0 fail_list =", "= PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column) < len(colums): if order_dir ==", "if request.method == \"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'ip', 'name', 'disabled',] lists", "HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm() if request.method == \"POST\": form", "@licence_required def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method ==", "HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form =", "in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content,", "\"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == 
\"POST\": form = SpamSetForm(instance=spam_set, post=request.POST,", "############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method == \"POST\": id = request.POST.get('id',", "workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过 if line in (0,1): continue password =", "get_redis_connection from django.utils.translation import ugettext_lazy as _ from app.core.models import Mailbox, DomainAttr, Domain", "= Fail2BanTrustForm() if request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache()", "continue password_list.append( password ) if form.file_ext in ('xls', 'xlsx'): import xlrd content =", "return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain", "== \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required", "[] if form.file_ext == 'txt': for line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r',", "= request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return", "_(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column", ": form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : 
form.spam_check_outside_virus.value, }) @licence_required", "BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form})", "= request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return", "lists = Fail2BanBlock.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if", "== \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required", "get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from lib.tools import clear_redis_cache from", "count: page = 1 paginator = Paginator(lists, length) try: lists = paginator.page(page) except", "PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method == \"POST\": form =", "'') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'ip',", "render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method == \"POST\": form", "lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists() and order_column and int(order_column)", "except ValueError: start_num = 0 page = 1 count = lists.count() if start_num", "lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count, 
\"iTotalDisplayRecords\": count, \"aaData\": []}", "fail = 0, 0 fail_list = [] password_list = [] if form.file_ext ==", "+ 1 except ValueError: start_num = 0 page = 1 count = lists.count()", "request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "1 except ValueError: start_num = 0 page = 1 count = len(lists) if", "lines = list(csv.reader(form.file_obj)) for elem in lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace('", "form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request):", "= Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save()", "lists.order_by('%s' % colums[int(order_column)]) try: length = int(data.get('length', 1)) except ValueError: length = 1", "['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search: lists =", "Q(proto__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums): if order_dir ==", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form", "render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" : domain, }) @licence_required def password_weaklist(request):", "+= 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form = 
PasswordWeakImportForm() domain_id =", "password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if", "request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '')", "request=request, domain_id=obj.id) if request.method == \"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if", "if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if lists.exists()", "xrange(table.nrows): #前两行跳过 if line in (0,1): continue password = table.row_values(line) password = password.strip().replace('\\n',", "if search: lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists() and order_column", "= ['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search: lists = lists.filter(", "'') search = data.get('search[value]', '') colums = ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',]", "def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method == \"POST\":", "1 count = lists.count() if start_num >= count: page = 1 paginator =", "def security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam'))", "= paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str", "render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if", "lists[:10000] try: length = 
int(data.get('length', 1)) except ValueError: length = 1 try: start_num", "return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '')", "in ('xls', 'xlsx'): import xlrd content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table", "form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP", "Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if", "import messages from django.template.response import TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage from", "def fail2ban_whitelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '')", "list(csv.reader(form.file_obj)) for elem in lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t',", "_(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column", "SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\"", "_(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return 
render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if", "line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if", "(page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d,", "# 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\")", "return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data = request.GET order_column =", "'').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password )", "# 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\")", "_(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET order_column", "from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from lib.tools", "(EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count,", "'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search: lists =", "HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj = 
Fail2Ban.objects.get(id=rule_id) form =", "if line in (0,1): continue password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000',", "'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return", "= 1 count = lists.count() if start_num >= count: page = 1 paginator", "request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request,", "request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request,", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id):", "= Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return", "messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET", "table = workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过 if line in (0,1): continue", "not password: continue password_list.append( password ) if form.file_ext == 'csv': import csv lines", "= Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj)", "return render(request, 
\"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method == \"POST\":", "t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request):", "frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set,", "HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not", "import licence_required from lib.tools import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm,", "= Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set)", "fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个')", "messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" : domain,", "\"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir =", "form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, 
\"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request,", "\"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" :", "lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) try: length", "BanRuleForm() if request.method == \"POST\": form = BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request,", "form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def", "form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request,", "t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form =", "return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method", "messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def", "@licence_required def fail2ban_whitelist_ajax(request): data = 
request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]',", "form.save_password_list(password_list) fail = len(fail_list) success = len(password_list) - fail for line in fail_list:", "import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for keyname in", "= lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) try: length =", "instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form})", "'') search = data.get('search[value]', '') colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute',", "# 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\")", "'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return", "messages from django.template.response import TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models", "@licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method ==", "form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist'))", "= SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == \"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, request=request,", ") fail_list = form.save_password_list(password_list) fail = 
len(fail_list) success = len(password_list) - fail for", "len(password_list) - fail for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') %", "@licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method == \"POST\": form = Fail2BanTrustForm(request.POST)", "\"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method", "messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj", "| Q(ip__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums): if order_dir", "line in xrange(table.nrows): #前两行跳过 if line in (0,1): continue password = table.row_values(line) password", "失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_import.html\", {'form': form,})", "messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={", "__future__ import unicode_literals import re import copy # import os import json #", "form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, 
\"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data", "\"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method == \"POST\": form =", "'') colums = ['id', 'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else:", "\\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache():", ") if lists.exists() and order_column and int(order_column) < len(colums): if order_dir == 'desc':", "if form.file_ext in ('xls', 'xlsx'): import xlrd content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None,", "form = BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return", "= lists.order_by('%s' % colums[int(order_column)]) try: length = int(data.get('length', 1)) except ValueError: length =", "in (0,1): continue password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ',", "'') colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists =", "return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form", "clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method == \"POST\": id =", "files=request.FILES) if form.is_valid(): success, fail = 0, 0 fail_list = [] password_list =", "lists = lists.filter( 
Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and order_column and int(order_column)", "1 for d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number})", "import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse from django.contrib import messages from", "Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname)", "request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request,", "= TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number +=", "= BanRuleForm(instance=obj) if request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save()", "django.db.models import Q from django_redis import get_redis_connection from django.utils.translation import ugettext_lazy as _", "domain, }) @licence_required def password_weaklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\")", "Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums): if", "return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method", "+= 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm() if request.method", "1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 
'number': number})", "0, 0 fail_list = [] password_list = [] if form.file_ext == 'txt': for", "import ugettext_lazy as _ from app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session import", "= BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request,", "_(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if", "= DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == \"POST\": form =", "lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try:", "search = data.get('search[value]', '') colums = ['id', 'password'] if search: lists = PasswordWeakList.objects.filter(", "line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password", "= 0 page = 1 count = len(lists) if start_num >= count: page", "def fail2ban_whitelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status',", "form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) @licence_required def", "= form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line in xrange(table.nrows):", "\"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, 
request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功'))", "if start_num >= count: page = 1 paginator = Paginator(lists, length) try: lists", "form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request,", "'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search: lists = lists.filter( Q(name__icontains=search)", "keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if", "= data.get('search[value]', '') colums = ['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if", "= DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST)", "get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form", "= 0, 0 fail_list = [] password_list = [] if form.file_ext == 'txt':", "password ) fail_list = form.save_password_list(password_list) fail = len(fail_list) success = len(password_list) - fail", "if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required", "render from django.http import 
HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse from django.contrib", "return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column =", "return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj)", "Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid, restart_process, get_fail2ban_info,", "length = int(data.get('length', 1)) except ValueError: length = 1 try: start_num = int(data.get('start',", "BanBlockListForm(instance=obj) if request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache()", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data", "Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache()", ": form, \"domain\" : domain, }) @licence_required def password_weaklist(request): if request.method == \"POST\":", "* (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d':", "ConfigParser from django.shortcuts import render from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers", "page = 1 paginator = Paginator(lists, length) try: lists = paginator.page(page) except (EmptyPage,", "colums[int(order_column)]) else: lists = 
lists.order_by('%s' % colums[int(order_column)]) try: length = int(data.get('length', 1)) except", "lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums):", "if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request,", "from django.db.models import Q from django_redis import get_redis_connection from django.utils.translation import ugettext_lazy as", "render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir", "from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for", "form = BanBlockListForm(instance=obj) if request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid():", "messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\":", "== \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功'))", "if request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request,", "form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not", "lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try: length = 
int(data.get('length', 1)) except ValueError:", "messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_import.html\",", "import re import copy # import os import json # import ConfigParser from", "for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render()", "return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form", "re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form = PasswordWeakImportForm()", "lists = paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0,", "rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form", "\"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return", "import Q from django_redis import get_redis_connection from django.utils.translation import ugettext_lazy as _ from", "'').replace('\\t', '') if not password: continue password_list.append( password ) if form.file_ext == 'csv':", "= PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method == \"POST\": form", "fail_list = form.save_password_list(password_list) fail = len(fail_list) success = len(password_list) - fail for line", "messages.add_message(request, 
messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required", "import unicode_literals import re import copy # import os import json # import", "PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail = 0, 0 fail_list = [] password_list", "domain = get_session_domain(domain_id) if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid():", "request.POST.get('status', \"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist'))", "def password_weaklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '')", "Http404 from django.core.urlresolvers import reverse from django.contrib import messages from django.template.response import TemplateResponse", "'').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password ) fail_list =", "re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm()", "obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : 
form.spam_check_outside_virus.value,", "-*- from __future__ import unicode_literals import re import copy # import os import", "if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return", "else: lists = lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try: length = int(data.get('length',", "form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request): domain_id =", "data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id',", "(page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d,", "if not password: continue password_list.append( password ) if form.file_ext == 'csv': import csv", "= data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name', 'ip', 'expire_time',", "request.method == \"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request,", "django.template.response import TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models import Q", "lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL))", "import reverse from django.contrib import messages from django.template.response import TemplateResponse from django.core.paginator import", "1)) except ValueError: length = 1 try: start_num = int(data.get('start', '0')) page =", "'').replace('\\000', 
'').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password ) fail_list", "number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def", "% colums[int(order_column)]) try: length = int(data.get('length', 1)) except ValueError: length = 1 try:", "fail2ban_rulelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search", "InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\":", "if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request,", "order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name', 'proto',", "rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form", "get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from lib.tools import clear_redis_cache from .forms import", "DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == \"POST\": form = SpamSetForm(instance=spam_set,", "count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number = length * (page-1)", "get_session_domain # from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required", "for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) 
clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request):", "not password: continue password_list.append( password ) if form.file_ext in ('xls', 'xlsx'): import xlrd", "= BanBlockListForm(instance=obj) if request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save()", "form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\",", "== \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required", "from django_redis import get_redis_connection from django.utils.translation import ugettext_lazy as _ from app.core.models import", "int(data.get('length', 1)) except ValueError: length = 1 try: start_num = int(data.get('start', '0')) page", "password_list.append( password ) if form.file_ext in ('xls', 'xlsx'): import xlrd content = form.file_obj.read()", "Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request,", "app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools", "table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password:", "import Paginator, EmptyPage, InvalidPage from django.db.models import Q from django_redis import get_redis_connection from", "import render from django.http 
import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse from", "request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "password: continue password_list.append( password ) if form.file_ext == 'csv': import csv lines =", "except ValueError: length = 1 try: start_num = int(data.get('start', '0')) page = start_num", "\"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def", "Paginator, EmptyPage, InvalidPage from django.db.models import Q from django_redis import get_redis_connection from django.utils.translation", "for line in xrange(table.nrows): #前两行跳过 if line in (0,1): continue password = table.row_values(line)", "1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number})", "\"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def", "= get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first()", "content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj:", "\"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def 
fail2ban_whitelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir =", "status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={})", "= form.save_password_list(password_list) fail = len(fail_list) success = len(password_list) - fail for line in", "import os import json # import ConfigParser from django.shortcuts import render from django.http", "\"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method == \"POST\": id =", "instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form})", "form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单", "import copy # import os import json # import ConfigParser from django.shortcuts import", "length = 1 try: start_num = int(data.get('start', '0')) page = start_num / length", "line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return HttpResponseRedirect(reverse('password_weaklist')) return", "len(fail_list) success = len(password_list) - fail for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败", "\"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() 
messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return", "lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) try: length = int(data.get('length',", "+ 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number':", "PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column)", "colums[int(order_column)]) lists = lists[:10000] try: length = int(data.get('length', 1)) except ValueError: length =", "= len(fail_list) success = len(password_list) - fail for line in fail_list: messages.add_message(request, messages.ERROR,", "\"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request,", "Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if", "django_redis import get_redis_connection from django.utils.translation import ugettext_lazy as _ from app.core.models import Mailbox,", ") else: lists = PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column) < len(colums):", "== \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail = 0, 0", "0 fail_list = [] password_list = [] if form.file_ext == 'txt': for line", "else: lists = lists.order_by('%s' % colums[int(order_column)]) try: length = int(data.get('length', 1)) except ValueError:", "def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if", "'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search: lists = lists.filter( 
Q(name__icontains=search)", "page = 1 count = lists.count() if start_num >= count: page = 1", "if lists.exists() and order_column and int(order_column) < len(colums): if order_dir == 'desc': lists", "length * (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html',", "status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功'))", "if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required", "@licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj: return", "return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method", "search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if lists.exists() and", "data.get('search[value]', '') colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists", "= int(data.get('length', 1)) except ValueError: length = 1 try: start_num = int(data.get('start', '0'))", "messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request):", ": form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) 
@licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain", "form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\",", "fail = len(fail_list) success = len(password_list) - fail for line in fail_list: messages.add_message(request,", "messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj =", "render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value,", "from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse from django.contrib import", "password ) if form.file_ext == 'csv': import csv lines = list(csv.reader(form.file_obj)) for elem", "\"aaData\": []} re_str = '<td.*?>(.*?)</td>' number = length * (page-1) + 1 for", "fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method == \"POST\": form", "lib.tools import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm,", "= get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES)", "request.method == \"POST\": form = BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功'))", "= Fail2Ban.objects.all() if search: lists = lists.filter( 
Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists()", "\"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "-*- coding: utf-8 -*- from __future__ import unicode_literals import re import copy #", "lists = Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if", "% {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return", "form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist'))", "'xlsx'): import xlrd content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0]", "(page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d,", "fail2ban_whitelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\")", "'') if not password: continue password_list.append( password ) fail_list = form.save_password_list(password_list) fail =", "@licence_required def fail2ban_rule_add(request): form = BanRuleForm() if request.method == \"POST\": form = BanRuleForm(request.POST)", "= paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\":", "from app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip", "colums = ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search:", "domain_id = 
get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set =", "HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET order_column = data.get('order[0][column]',", "ValueError: start_num = 0 page = 1 count = len(lists) if start_num >=", "return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '')", "form.file_ext == 'txt': for line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace('", "1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first()", "== \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return", "render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method == \"POST\": id", "clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data", "HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column = data.get('order[0][column]',", "SpamSetForm, \\ 
SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def", "'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if", "'') if not password: continue password_list.append( password ) if form.file_ext == 'csv': import", "restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from lib.tools import clear_redis_cache from .forms", "request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR,", "white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form =", "content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm() if request.method == \"POST\": form =", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required", "render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if", "'') colums = ['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search: lists", "def fail2ban_block_add(request): form = BanBlockListForm() if request.method == \"POST\": form = BanBlockListForm(request.POST) if", "status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return 
HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={})", "request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail = 0,", "= start_num / length + 1 except ValueError: start_num = 0 page =", "SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request,", "@licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]',", "from app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain # from", "start_num >= count: page = 1 paginator = Paginator(lists, length) try: lists =", "\"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request):", "= lists[:10000] try: length = int(data.get('length', 1)) except ValueError: length = 1 try:", "return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form", "from django.shortcuts import render from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import", "form.file_ext in ('xls', 'xlsx'): import xlrd content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content)", "data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name', 'ip', 'expire_time', 'update_time',", "= request.POST.get('id', \"\") status = 
request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache()", "rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form", "render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method == \"POST\": id", "t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request):", "for d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render()", "return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method == \"POST\":", "t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number", "'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search: lists = lists.filter( Q(name__icontains=search) |", "t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form =", "in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个,", "messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = 
request.GET", "workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过 if", "fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method == \"POST\": form", "obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form = Fail2BanTrustForm(request.POST,", "import get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence", "\"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return", "data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists',", "and int(order_column) < len(colums): if order_dir == 'desc': lists = lists.order_by('-%s' % colums[int(order_column)])", "search = data.get('search[value]', '') colums = ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists", "_(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\": form,", ": form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request): domain_id", "line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS,", "_(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 
失败%(fail)s个') % {\"success\": success,", "licence_required from lib.tools import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\", "colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try: length =", "def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method == \"POST\":", "_(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if", "django.core.urlresolvers import reverse from django.contrib import messages from django.template.response import TemplateResponse from django.core.paginator", "messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\":", "= ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if", "password_weaklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\")", "redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method == \"POST\": id", "t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form =", "* (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d':", "request.method == \"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功'))", "import json # import 
ConfigParser from django.shortcuts import render from django.http import HttpResponseRedirect,", "\"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist'))", "file_contents=content) table = workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过 if line in (0,1):", "line in (0,1): continue password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace('", "from lib.tools import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm,", "\"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\" : form.spam_check_outside_virus.value, })", "t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number", "+ 1 for d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number':", "return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if", "PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request):", "try: length = int(data.get('length', 1)) except ValueError: length = 1 try: start_num =", "= int(data.get('start', '0')) page = start_num / length + 1 
except ValueError: start_num", "len(lists) if start_num >= count: page = 1 paginator = Paginator(lists, length) try:", ": form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if", "'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search: lists", "Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") :", "+ 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number':", "= password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue password_list.append(", "屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status", "search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and order_column and", "length + 1 except ValueError: start_num = 0 page = 1 count =", "\"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method == \"POST\": id =", "number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj", "json # import ConfigParser from django.shortcuts import render from django.http import HttpResponseRedirect, HttpResponse,", "form.file_ext == 'csv': import csv lines = list(csv.reader(form.file_obj)) for elem in lines: password", "request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() 
messages.add_message(request, messages.SUCCESS,", "return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj)", "= xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过 if line", "(page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d,", "% colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try: length", "rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id", "屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status", "\"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method", "as _ from app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain", "order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums", "Fail2BanBlock.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and", "form = BanBlockListForm() if request.method == \"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save()", ": %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\":", "SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models 
import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis", "import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from lib.tools import clear_redis_cache", "messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data = request.GET", "= Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj)", "django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models import Q from django_redis import get_redis_connection", "@licence_required def password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if", "_(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_import.html\", {'form':", "BanRuleForm(instance=obj) if request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache()", "= ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search: lists", "# -*- coding: utf-8 -*- from __future__ import unicode_literals import re import copy", "form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### #", "= 1 try: start_num = int(data.get('start', '0')) page = start_num / length +", "'0')) page = start_num / length + 1 except ValueError: 
start_num = 0", "post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request,", "if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and order_column", "SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == \"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id)", "import csv lines = list(csv.reader(form.file_obj)) for elem in lines: password = line.strip().replace('\\n', '').replace('\\r',", "obj = Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form =", "and order_column and int(order_column) < len(colums): if order_dir == 'desc': lists = lists.order_by('-%s'", "'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return", "if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if", "'disabled',] lists = Fail2Ban.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) )", "def fail2ban_rulelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status',", "\"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir =", "colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all()", "password_weaklist_ajax(request): data = request.GET 
order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search", "禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status", "\"POST\": form = BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist'))", "= data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'password'] if search:", "= lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists() and order_column and int(order_column) <", "def fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '')", "('xls', 'xlsx'): import xlrd content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table =", "lib.licence import licence_required from lib.tools import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm,", "= get_session_domain(domain_id) if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success,", "password_list.append( password ) if form.file_ext == 'csv': import csv lines = list(csv.reader(form.file_obj)) for", "os import json # import ConfigParser from django.shortcuts import render from django.http import", "\"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return", "render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if", "domain_id = 
get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set =", "PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache()", "messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data =", "messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form = Fail2BanTrustForm()", "'disabled',] lists = Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) )", "django.shortcuts import render from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse", "'') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'password']", "search: lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists() and order_column and", "if not password: continue password_list.append( password ) if form.file_ext in ('xls', 'xlsx'): import", "= table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not", "if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" : form,", "= SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): 
form.save()", "@licence_required def password_weaklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status =", ".forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import", "fail2ban_whitelist_add(request): form = Fail2BanTrustForm() if request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid():", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data", "number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm() if", "t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request):", "coding: utf-8 -*- from __future__ import unicode_literals import re import copy # import", "HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse from django.contrib import messages from django.template.response", "paginator = Paginator(lists, length) try: lists = paginator.page(page) except (EmptyPage, InvalidPage): lists =", "import Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools import", "Fail2BanTrustForm() if request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request,", "app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from", "BanBlockListForm() if 
request.method == \"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request,", "\"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist'))", "for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request,", "############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method == \"POST\": id = request.POST.get('id',", "if request.method == \"POST\": form = BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm() if request.method == \"POST\":", "= BanBlockListForm() if request.method == \"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache()", "HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]',", "['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search)", "from django.template.response import TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models import", "= data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums =", "# import os import json # import ConfigParser from django.shortcuts import render from", "= BanRuleForm() if request.method == \"POST\": form = 
BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache()", "= TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number +=", "t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number", "return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method ==", "[] password_list = [] if form.file_ext == 'txt': for line in form.file_obj.readlines(): password", "= list(csv.reader(form.file_obj)) for elem in lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ',", "security_antispam(request): domain_id = get_domainid_bysession(request) obj = Domain.objects.filter(id=domain_id).first() if not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set", "password_list = [] if form.file_ext == 'txt': for line in form.file_obj.readlines(): password =", "from django.utils.translation import ugettext_lazy as _ from app.core.models import Mailbox, DomainAttr, Domain from", "1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm() if request.method ==", "\"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return", "return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '')", "if 
request.method == \"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save()", "== \"POST\": form = BanBlockListForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return", "Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\")", "status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={})", "lists = lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try: length = int(data.get('length', 1))", "status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={})", "'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) try:", "re_str = '<td.*?>(.*?)</td>' number = length * (page-1) + 1 for d in", "'<td.*?>(.*?)</td>' number = length * (page-1) + 1 for d in lists.object_list: t", "continue password_list.append( password ) if form.file_ext == 'csv': import csv lines = list(csv.reader(form.file_obj))", "fail2ban_ip from lib.licence import licence_required from lib.tools import clear_redis_cache from .forms import BanRuleForm,", "lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL))", "data = request.GET order_column = data.get('order[0][column]', '') order_dir = 
data.get('order[0][dir]', '') search =", "lists.exists() and order_column and int(order_column) < len(colums): if order_dir == 'desc': lists =", "= Fail2BanBlock.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists()", "DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if", "= request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache()", "count = lists.count() if start_num >= count: page = 1 paginator = Paginator(lists,", "== \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required", "return render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" : domain, }) @licence_required def", "== 'csv': import csv lines = list(csv.reader(form.file_obj)) for elem in lines: password =", "@licence_required def fail2ban_whitelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status =", "messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj", "messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') %", "TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models 
import Q from django_redis", "return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm() if request.method == \"POST\":", "lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password:", "%(fail)s') % {\"fail\": line}) messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail})", "\"\") status = request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS,", "'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) lists", "HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain =", "colums = ['id', 'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists", "request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功'))", "if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS,", "import get_redis_connection from django.utils.translation import ugettext_lazy as _ from app.core.models import Mailbox, DomainAttr,", "BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock,", "Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, 
messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request,", "'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search)", "TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1", "= SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={", "BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form})", "form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def", "form.is_valid(): success, fail = 0, 0 fail_list = [] password_list = [] if", "context={ \"form\" : form, \"domain\" : domain, }) @licence_required def password_weaklist(request): if request.method", "start_num / length + 1 except ValueError: start_num = 0 page = 1", "= {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number", "= data.get('search[value]', '') colums = ['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',]", "_(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def 
fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id)", "_(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj = Fail2BanBlock.objects.get(id=block_id)", "\"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number = length *", "= [] password_list = [] if form.file_ext == 'txt': for line in form.file_obj.readlines():", "status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功'))", "lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL))", "reverse from django.contrib import messages from django.template.response import TemplateResponse from django.core.paginator import Paginator,", "password ) if form.file_ext in ('xls', 'xlsx'): import xlrd content = form.file_obj.read() workbook", "'') if not password: continue password_list.append( password ) if form.file_ext in ('xls', 'xlsx'):", "def fail2ban_blocklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status',", "block_id): obj = Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method == \"POST\": form =", "1 except ValueError: start_num = 0 page = 1 count = lists.count() if", "import xlrd content = form.file_obj.read() workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for", "if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail =", "xlrd content = form.file_obj.read() 
workbook = xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line", "if form.file_ext == 'csv': import csv lines = list(csv.reader(form.file_obj)) for elem in lines:", "HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def fail2ban_whitelist(request): if request.method ==", "from __future__ import unicode_literals import re import copy # import os import json", "return render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" :", ": redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method == \"POST\":", "{\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number =", "@licence_required def fail2ban_block_add(request): form = BanBlockListForm() if request.method == \"POST\": form = BanBlockListForm(request.POST)", "Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form})", "colums[int(order_column)]) try: length = int(data.get('length', 1)) except ValueError: length = 1 try: start_num", "messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ############################### # 屏蔽白名单 @licence_required def", "= lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000] try: length = int(data.get('length', 1)) except", 
"order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name', 'ip',", "for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render()", "['id', 'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all()", "id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete()", "order_dir == 'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' %", "\"form\": form, \"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value,", "if form.is_valid(): success, fail = 0, 0 fail_list = [] password_list = []", "PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection()", "id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete()", "re import copy # import os import json # import ConfigParser from django.shortcuts", "from django.core.urlresolvers import reverse from django.contrib import messages from django.template.response import TemplateResponse from", "in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content,", "messages.add_message(request, messages.SUCCESS, _(u'批量添加成功%(success)s个, 失败%(fail)s个') % {\"success\": success, \"fail\": fail}) return HttpResponseRedirect(reverse('password_weaklist')) return render(request,", "content_type=\"application/json\") @licence_required def 
password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id)", "messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist.html\",context={}) @licence_required def fail2ban_whitelist_add(request): form =", "\"form\" : form, \"domain\" : domain, }) @licence_required def password_weaklist(request): if request.method ==", "length * (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html',", "render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir", "from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models", "instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form})", "if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ###############################", "['id', 'name', 'proto', 'internal','block_fail', 'block_unexists', 'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search:", "Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid,", "xlrd.open_workbook(filename=None, file_contents=content) table = workbook.sheets()[0] for line in 
xrange(table.nrows): #前两行跳过 if line in", "request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist'))", "colums = ['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search: lists =", "+ 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number':", "= Fail2BanBlock.objects.get(id=block_id) form = BanBlockListForm(instance=obj) if request.method == \"POST\": form = BanBlockListForm(request.POST, instance=obj)", "BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request,", "HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\": obj,", "int(data.get('start', '0')) page = start_num / length + 1 except ValueError: start_num =", "paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str =", "form = BanRuleForm() if request.method == \"POST\": form = BanRuleForm(request.POST) if form.is_valid(): form.save()", "number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form = PasswordWeakImportForm() domain_id", "messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data =", "lists.filter( Q(name__icontains=search) | 
Q(proto__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums):", "for line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '')", ".models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis = get_redis_connection() for keyname", "if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method", "request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist'))", "'') search = data.get('search[value]', '') colums = ['id', 'password'] if search: lists =", "= PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail = 0, 0 fail_list = []", "d in lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str,", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id):", "fail_list = [] password_list = [] if form.file_ext == 'txt': for line in", "[]} re_str = '<td.*?>(.*?)</td>' number = length * (page-1) + 1 for d", "Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj) if", "= request.GET order_column = data.get('order[0][column]', '') order_dir = 
data.get('order[0][dir]', '') search = data.get('search[value]',", "@licence_required def fail2ban_blocklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status =", "form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def", "if request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS,", "length * (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html',", "if request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request,", "= Paginator(lists, length) try: lists = paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages)", "@licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]',", "= workbook.sheets()[0] for line in xrange(table.nrows): #前两行跳过 if line in (0,1): continue password", "% colums[int(order_column)]) lists = lists[:10000] try: length = int(data.get('length', 1)) except ValueError: length", "domain = Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form =", "= Fail2BanTrustForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request,", "in lines: password = line.strip().replace('\\n', 
'').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not", "from django.core.paginator import Paginator, EmptyPage, InvalidPage from django.db.models import Q from django_redis import", "fail2ban_blocklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\")", "clear_fail2ban_cache(): redis = get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### #", "render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir", "length) try: lists = paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs =", "# from lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from", "= get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first()", "fail for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\": line})", "obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method == \"POST\": form = BanRuleForm(request.POST,", "= get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required", "EmptyPage, InvalidPage from django.db.models import Q from django_redis import get_redis_connection from django.utils.translation import", "'block_minute', 'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search: lists = lists.filter( Q(name__icontains=search) |", "form 
= SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return", "* (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d':", "\"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\":", "domain_id=obj.id) if request.method == \"POST\": form = SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid():", "', '').replace('\\t', '') if not password: continue password_list.append( password ) if form.file_ext in", "lib.tools import get_process_pid, restart_process, get_fail2ban_info, fail2ban_ip from lib.licence import licence_required from lib.tools import", "in xrange(table.nrows): #前两行跳过 if line in (0,1): continue password = table.row_values(line) password =", "django.http import HttpResponseRedirect, HttpResponse, Http404 from django.core.urlresolvers import reverse from django.contrib import messages", "'').replace('\\t', '') if not password: continue password_list.append( password ) if form.file_ext in ('xls',", "request.method == \"POST\": id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status", "if not password: continue password_list.append( password ) fail_list = form.save_password_list(password_list) fail = len(fail_list)", "csv lines = list(csv.reader(form.file_obj)) for elem in lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000',", "django.utils.translation import ugettext_lazy as _ from app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session", "= 1 count = len(lists) if start_num >= count: page = 1 paginator", "form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return 
HttpResponseRedirect(reverse('fail2ban_whitelist'))", "request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request,", "TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1", "= BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return", "_(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data = request.GET order_column", "form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form = SendFrequencyForm(instance=frequency_set, post=request.POST) if form.is_valid():", "re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm()", "messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" : domain, })", "data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'password'] if search: lists", "= ['id', 'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists =", "lists = lists[:10000] try: length = int(data.get('length', 1)) except ValueError: length = 1", "def password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method", "messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, 
\"security/antispam.html\", context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\" :", "return HttpResponseRedirect(reverse('security_frequency')) frequency_set = DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\": form", "if status == \"delete\": Fail2Ban.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request,", "1 try: start_num = int(data.get('start', '0')) page = start_num / length + 1", "page = start_num / length + 1 except ValueError: start_num = 0 page", "@licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not domain: return", "fail2ban_blocklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '') search", "@licence_required def fail2ban_rulelist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status =", "page = 1 count = len(lists) if start_num >= count: page = 1", "domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method == \"POST\": form = PasswordWeakImportForm(data=request.POST,", "data.get('search[value]', '') colums = ['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search:", "HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_ajax(request): data = request.GET order_column = data.get('order[0][column]',", "success, fail = 0, 0 fail_list = [] password_list = [] if form.file_ext", "not password: continue password_list.append( password ) fail_list = form.save_password_list(password_list) fail = 
len(fail_list) success", "= length * (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request,", "_(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id): obj = Fail2Ban.objects.get(id=rule_id)", "if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) ###############################", "'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1 return", "'update_time', 'disabled',] lists = Fail2Ban.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search)", "in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method", "= BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return", "= Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists()", "HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request): if request.method ==", "messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) 
@licence_required def fail2ban_whitelist_ajax(request): data =", "lists = PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column) < len(colums): if order_dir", "'txt': for line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t',", "PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column) < len(colums): if order_dir == 'desc':", "try: start_num = int(data.get('start', '0')) page = start_num / length + 1 except", "'').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password ) if form.file_ext", "Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def fail2ban_blocklist_ajax(request):", "@licence_required def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method ==", "1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm() if request.method ==", "else: lists = PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column) < len(colums): if", "if order_dir == 'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s'", "password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue password_list.append( password", "== \"POST\": form = BanRuleForm(request.POST) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return", "= 0 page = 1 count = lists.count() if start_num >= count: 
page", "elem in lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if", "< len(colums): if order_dir == 'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists", "id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete()", "form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\"", "HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form =", "\"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number = length * (page-1) +", "ValueError: start_num = 0 page = 1 count = lists.count() if start_num >=", "except ValueError: start_num = 0 page = 1 count = len(lists) if start_num", "fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method == \"POST\": form", "= len(lists) if start_num >= count: page = 1 paginator = Paginator(lists, length)", "\"POST\": form = Fail2BanTrustForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return", "HttpResponse, Http404 from django.core.urlresolvers import reverse from django.contrib import messages from django.template.response import", "}) @licence_required def password_weaklist(request): if request.method == \"POST\": id = request.POST.get('id', \"\") status", "= lists.order_by('-%s' % colums[int(order_column)]) else: lists = lists.order_by('%s' % colums[int(order_column)]) lists = lists[:10000]", ") if form.file_ext 
in ('xls', 'xlsx'): import xlrd content = form.file_obj.read() workbook =", "= request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanTrust.objects.filter(pk=id).delete() clear_fail2ban_cache()", "+= 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request) obj =", "'disabled',] lists = Fail2BanBlock.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) )", "Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if lists.exists() and order_column and int(order_column) <", "rule_id): obj = Fail2Ban.objects.get(id=rule_id) form = BanRuleForm(instance=obj) if request.method == \"POST\": form =", "return HttpResponseRedirect(reverse('password_weaklist')) return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET order_column =", "continue password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '')", "HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_block_add(request): form = BanBlockListForm() if request.method == \"POST\": form", "id = request.POST.get('id', \"\") status = request.POST.get('status', \"\") if status == \"delete\": Fail2BanBlock.objects.filter(pk=id).delete()", "form = Fail2BanTrustForm() if request.method == \"POST\": form = Fail2BanTrustForm(request.POST) if form.is_valid(): form.save()", "in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content,", "_(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\": obj, 
\"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\"", "messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj =", "except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count, \"iTotalDisplayRecords\":", "return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj)", "\"spam_check_outside_virus\" : form.spam_check_outside_virus.value, }) @licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first()", "@licence_required def password_weaklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]',", "= 1 paginator = Paginator(lists, length) try: lists = paginator.page(page) except (EmptyPage, InvalidPage):", "HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if request.method == \"POST\":", ") if form.file_ext == 'csv': import csv lines = list(csv.reader(form.file_obj)) for elem in", "PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust, Fail2BanBlock, PasswordWeakList def clear_fail2ban_cache(): redis =", "context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" :", "== \"POST\": form = 
SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS,", "len(colums): if order_dir == 'desc': lists = lists.order_by('-%s' % colums[int(order_column)]) else: lists =", "password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue", "import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban,", "= data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name', 'proto', 'internal','block_fail',", "copy # import os import json # import ConfigParser from django.shortcuts import render", "_(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" : form, \"domain\" : domain, }) @licence_required", "form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### #", "1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request)", "* (page-1) + 1 for d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d':", "if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required", "domain: return HttpResponseRedirect(reverse('security_frequency')) frequency_set = 
DomainAttr.objects.filter(domain_id=domain.id,type=\"system\",item=\"cf_sendlimit\").first() form = SendFrequencyForm(instance=frequency_set) if request.method == \"POST\":", "password: continue password_list.append( password ) fail_list = form.save_password_list(password_list) fail = len(fail_list) success =", "form = BanRuleForm(instance=obj) if request.method == \"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid():", "Q(ip__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums): if order_dir ==", "unicode_literals import re import copy # import os import json # import ConfigParser", "not obj: return HttpResponseRedirect(reverse('security_antispam')) spam_set = DomainAttr.objects.filter(domain_id=obj.id,type=\"system\",item=\"cf_antispam\").first() form = SpamSetForm(instance=spam_set, request=request, domain_id=obj.id) if", "else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\"", "post=request.POST) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return render(request, \"security/frequency_setting.html\", context={ \"form\" :", "messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET", "d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str,", "lists.object_list: t = TemplateResponse(request, 'security/fail2ban_rulelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL))", "clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加规则成功')) return 
HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) @licence_required def fail2ban_rule_modify(request, rule_id):", "from django.contrib import messages from django.template.response import TemplateResponse from django.core.paginator import Paginator, EmptyPage,", "for elem in lines: password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '')", "fail2ban_rule_add(request): form = BanRuleForm() if request.method == \"POST\": form = BanRuleForm(request.POST) if form.is_valid():", "TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number += 1", "DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain # from lib.tools import get_process_pid, restart_process,", "'name', 'disabled',] lists = Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search)", "= PasswordWeakList.objects.filter( Q(password__contains=search) ) else: lists = PasswordWeakList.objects.all() if lists.exists() and order_column and", "if form.file_ext == 'txt': for line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000',", "get_redis_connection() for keyname in redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required def", "\"\") if status == \"delete\": PasswordWeakList.objects.filter(pk=id).delete() clear_redis_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('password_weaklist')) return", "= [] if form.file_ext == 'txt': for line in form.file_obj.readlines(): password = line.strip().replace('\\n',", "== \"POST\": form = Fail2BanTrustForm(request.POST) if 
form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'添加成功')) return", "Fail2Ban.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) ) if lists.exists() and", "return HttpResponseRedirect(reverse('security_antispam')) else: messages.add_message(request, messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\":", "BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm from .models import Fail2Ban, Fail2BanTrust,", "== \"POST\": form = BanBlockListForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功'))", "redis.keys(\"fail2ban_cache*\") : redis.delete(keyname) clear_redis_cache() ############################### # 禁用IP列表 @licence_required def fail2ban_rulelist(request): if request.method ==", "'') order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'name',", "= data.get('search[value]', '') colums = ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists =", "count = len(lists) if start_num >= count: page = 1 paginator = Paginator(lists,", "password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue", "django.contrib import messages from django.template.response import TemplateResponse from django.core.paginator import Paginator, EmptyPage, InvalidPage", "paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs = {\"sEcho\": 0, \"iTotalRecords\": count,", "form, \"domain\" : domain, }) @licence_required def password_weaklist(request): if request.method == \"POST\": id", "% colums[int(order_column)]) else: lists = lists.order_by('%s' % 
colums[int(order_column)]) try: length = int(data.get('length', 1))", "0, \"iTotalRecords\": count, \"iTotalDisplayRecords\": count, \"aaData\": []} re_str = '<td.*?>(.*?)</td>' number = length", "= TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content, re.DOTALL)) number +=", "password_list.append( password ) fail_list = form.save_password_list(password_list) fail = len(fail_list) success = len(password_list) -", "import ConfigParser from django.shortcuts import render from django.http import HttpResponseRedirect, HttpResponse, Http404 from", "1 count = len(lists) if start_num >= count: page = 1 paginator =", "= lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and order_column and int(order_column) <", "_ from app.core.models import Mailbox, DomainAttr, Domain from app.utils.domain_session import get_domainid_bysession, get_session_domain #", "def fail2ban_rule_add(request): form = BanRuleForm() if request.method == \"POST\": form = BanRuleForm(request.POST) if", "number = length * (page-1) + 1 for d in lists.object_list: t =", "messages.ERROR, _(u'修改设置失败,请检查输入参数')) return render(request, \"security/antispam.html\", context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value,", "form, \"domain\": obj, \"spam_check_local_spam\" : form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\" : form.spam_check_outside_spam.value, \"spam_check_outside_virus\"", "messages.SUCCESS, _(u'修改规则成功')) return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rule_add.html\",context={\"form\":form}) ############################### # 屏蔽IP @licence_required def fail2ban_blocklist(request):", "\"security/antispam.html\", context={ \"form\": form, \"domain\": obj, \"spam_check_local_spam\" : 
form.spam_check_local_spam.value, \"spam_check_local_virus\" : form.spam_check_local_virus.value, \"spam_check_outside_spam\"", "lists = Fail2Ban.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(proto__icontains=search) ) if", "password: continue password_list.append( password ) if form.file_ext in ('xls', 'xlsx'): import xlrd content", "order_column and int(order_column) < len(colums): if order_dir == 'desc': lists = lists.order_by('-%s' %", "\"delete\": Fail2BanBlock.objects.filter(pk=id).delete() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'删除成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_blocklist.html\",context={}) @licence_required def", "== \"POST\": form = BanRuleForm(request.POST, instance=obj) if form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改规则成功'))", "order_dir = data.get('order[0][dir]', '') search = data.get('search[value]', '') colums = ['id', 'ip', 'name',", "import clear_redis_cache from .forms import BanRuleForm, BanBlockListForm, Fail2BanTrustForm, SpamSetForm, \\ SendFrequencyForm, PasswordWeakForm, PasswordWeakImportForm", "'') search = data.get('search[value]', '') colums = ['id', 'ip', 'name', 'disabled',] lists =", "d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_blocklist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str,", "t.content, re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id =", "\"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '') order_dir =", "'') colums = ['id', 'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if", "in 
lists.object_list: t = TemplateResponse(request, 'security/password_weak_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str, t.content,", "search = data.get('search[value]', '') colums = ['id', 'ip', 'name', 'disabled',] lists = Fail2BanTrust.objects.all()", "Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and order_column and int(order_column) < len(colums): if", "messages.SUCCESS, _(u'添加成功')) return HttpResponseRedirect(reverse('fail2ban_blocklist')) return render(request, \"security/fail2ban_block_add.html\",context={\"form\":form}) @licence_required def fail2ban_block_modify(request, block_id): obj =", "d in lists.object_list: t = TemplateResponse(request, 'security/fail2ban_whitelist_ajax.html', {'d': d, 'number': number}) t.render() rs[\"aaData\"].append(re.findall(re_str,", "# import ConfigParser from django.shortcuts import render from django.http import HttpResponseRedirect, HttpResponse, Http404", "form.is_valid(): form.save() clear_fail2ban_cache() messages.add_message(request, messages.SUCCESS, _(u'修改成功')) return HttpResponseRedirect(reverse('fail2ban_whitelist')) return render(request, \"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def", "- fail for line in fail_list: messages.add_message(request, messages.ERROR, _(u'批量添加失败 : %(fail)s') % {\"fail\":", "/ length + 1 except ValueError: start_num = 0 page = 1 count", "= line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t', '') if not password: continue password_list.append(", "Fail2BanTrust.objects.all() if search: lists = lists.filter( Q(name__icontains=search) | Q(ip__icontains=search) ) if lists.exists() and", "'name', 'ip', 'expire_time', 'update_time', 'disabled',] lists = Fail2BanBlock.objects.all() if search: lists = lists.filter(", "}) @licence_required def security_frequency(request): domain_id = get_domainid_bysession(request) domain = 
Domain.objects.filter(id=domain_id).first() if not domain:", "(0,1): continue password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ', '').replace('\\t',", "lists = lists.order_by('%s' % colums[int(order_column)]) try: length = int(data.get('length', 1)) except ValueError: length", "InvalidPage from django.db.models import Q from django_redis import get_redis_connection from django.utils.translation import ugettext_lazy", "SpamSetForm(instance=spam_set, post=request.POST, request=request, domain_id=obj.id) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, _(u'修改设置成功')) return HttpResponseRedirect(reverse('security_antispam')) else:", "return HttpResponseRedirect(reverse('fail2ban_rulelist')) return render(request, \"security/fail2ban_rulelist.html\",context={}) @licence_required def fail2ban_rulelist_ajax(request): data = request.GET order_column =", "Paginator(lists, length) try: lists = paginator.page(page) except (EmptyPage, InvalidPage): lists = paginator.page(paginator.num_pages) rs", "data.get('search[value]', '') colums = ['id', 'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search) )", "\"security/fail2ban_whitelist_add.html\",context={\"form\":form}) @licence_required def fail2ban_whitelist_modify(request, white_id): obj = Fail2BanTrust.objects.get(id=white_id) form = Fail2BanTrustForm(instance=obj) if request.method", "password_weaklist_import(request): form = PasswordWeakImportForm() domain_id = get_domainid_bysession(request) domain = get_session_domain(domain_id) if request.method ==", "== 'txt': for line in form.file_obj.readlines(): password = line.strip().replace('\\n', '').replace('\\r', '').replace('\\000', '').replace(' ',", "#前两行跳过 if line in (0,1): continue password = table.row_values(line) password = password.strip().replace('\\n', '').replace('\\r',", "def fail2ban_rulelist_ajax(request): data = 
request.GET order_column = data.get('order[0][column]', '') order_dir = data.get('order[0][dir]', '')", "def security_frequency(request): domain_id = get_domainid_bysession(request) domain = Domain.objects.filter(id=domain_id).first() if not domain: return HttpResponseRedirect(reverse('security_frequency'))", "content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm() if request.method == \"POST\": form =", "re.DOTALL)) number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def security_antispam(request): domain_id = get_domainid_bysession(request)", "return render(request, \"security/password_weak_list.html\",context={}) @licence_required def password_weaklist_ajax(request): data = request.GET order_column = data.get('order[0][column]', '')", "= data.get('search[value]', '') colums = ['id', 'password'] if search: lists = PasswordWeakList.objects.filter( Q(password__contains=search)", "= '<td.*?>(.*?)</td>' number = length * (page-1) + 1 for d in lists.object_list:", "'csv': import csv lines = list(csv.reader(form.file_obj)) for elem in lines: password = line.strip().replace('\\n',", "\"POST\": form = PasswordWeakImportForm(data=request.POST, files=request.FILES) if form.is_valid(): success, fail = 0, 0 fail_list", "number += 1 return HttpResponse(json.dumps(rs), content_type=\"application/json\") @licence_required def fail2ban_rule_add(request): form = BanRuleForm() if" ]
[ "{thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want to try:", "(str): manpage description for most relevant manpage w.r.t. forum Returns: confidence (float): confidence", "in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens)", "(str): standard error captured in state variable forum (str): answer text from most", "apis[provider] # Skip this provider if it isn't supported on the target OS", "(C) 2020 IBM. All Rights Reserved. # # See LICENSE.txt file in the", "variable forum (str): answer text from most relevant stack exchange post w.r.t query", "= False for provider in apis: # We don't want to process the", "information. # import os from pathlib import Path from clai.tools.colorize_console import Colorize from", "variant\" logger.info(f\"==> Success!!! Found a result in the {apiString}\") # Find closest match", ".warning() \\ .append(\"Do you want to try: man {}\".format(command)) \\ .to_console() # Mark", "are # no search variants (ie: the singleton variant case), the variants #", "self.store.search(state.stderr, service=provider, size=1) if data: apiString = str(thisAPI) if variant is not None:", "# Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of", "search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\") variants:List", "outer loop as well if helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable to", "\\ .append( f\"Sorry. 
It looks like you have stumbled across a problem that", "self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity return confidence", "# For each search variant supported by the current API, query # the", "stack exchange post w.r.t query manpage (str): manpage description for most relevant manpage", "from most relevant stack exchange post w.r.t query manpage (str): manpage description for", "to clarify results from other providers if provider == \"manpages\": logger.info(f\"Skipping search provider", "logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move to", "post_execute(self, state: State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code:", "closest match b/w relevant data and manpages for unix searchResult = thisAPI.extract_search_result(data) manpages", "query, forum, manpage): \"\"\" Computes the confidence based on query, stack-exchange post answer", "text from most relevant stack exchange post w.r.t query manpage (str): manpage description", "Reserved. 
# # See LICENSE.txt file in the root directory # of this", "Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code ==", "provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider = apis[provider] # Skip", "searching break # If we found help, then break out of the outer", "helpWasFound = False for provider in apis: # We don't want to process", "manpage): \"\"\" Computes the confidence based on query, stack-exchange post answer and manpage", "== \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider = apis[provider] # Skip this", "query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command)", "don't want to process the manpages provider... thats the provider # that we", "on the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded", "f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! Found a result in the {apiString}\") # Find", "manpage description 3. Return product of two similarities Args: query (str): standard error", "return Action(suggested_command=state.command) def post_execute(self, state: State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\")", "clai.server.agent import Agent from clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger import current_logger", "query and forum text 2. 
Compute token-wise similarity b/w forum text and manpage", "{}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound", "Internet doesn't have answer to.\\n\") \\ .info() \\ .append(f\"Have you tried turning it", "# list will only contain a single, Nonetype value. for variant in variants:", "# FIXME: Artificially boosted confidence confidence = 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command,", "return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in apis: # We don't", "= os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip()", "no search variants\") variants:List = [None] # For each search variant supported by", "other providers if provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider =", "\\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want to try: man", "description for most relevant manpage w.r.t. 
forum Returns: confidence (float): confidence on the", "{} \\t Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I", "list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List", "help; no need to keep searching break # If we found help, then", "Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks like", "(str): answer text from most relevant stack exchange post w.r.t query manpage (str):", "exchange post w.r.t query manpage (str): manpage description for most relevant manpage w.r.t.", "thisAPI:Provider = apis[provider] # Skip this provider if it isn't supported on the", "data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching for", "= apis[provider] # Skip this provider if it isn't supported on the target", "'{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr, service=provider, size=1)", "data: apiString = str(thisAPI) if variant is not None: apiString = f\"{apiString} '{variant}'", "found this in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do", "found help; no need to keep searching break # If we found help,", "for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens", "supported on the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==>", "in the {apiString}\") # Find 
closest match b/w relevant data and manpages for", "def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens =", "break out of the outer loop as well if helpWasFound: break if not", "forum Returns: confidence (float): confidence on the returned manpage w.r.t. query \"\"\" query_forum_similarity", "def compute_confidence(self, query, forum, manpage): \"\"\" Computes the confidence based on query, stack-exchange", "found relevant manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially boosted", "= 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man", "logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks like you have stumbled across", "file in the root directory # of this source tree for licensing information.", "for provider in apis: # We don't want to process the manpages provider...", "inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens =", "return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching", "return confidence def get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command) def post_execute(self, state:", "# of this source tree for licensing information. # import os from pathlib", "import Agent from clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger import current_logger as", "Algorithm: 1. Compute token-wise similarity b/w query and forum text 2. 
Compute token-wise", "{apiString}\") # Find closest match b/w relevant data and manpages for unix searchResult", "the outer loop as well if helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable", "clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore from clai.server.agent import Agent from clai.server.command_message", "query, stack-exchange post answer and manpage Algorithm: 1. Compute token-wise similarity b/w query", "logger.info(\"==> Success!!! found relevant manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME:", "# Mark that help was indeed found helpWasFound = True # We've found", ".info() \\ .append(f\"Have you tried turning it OFF and ON again. ;)\") \\", "tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\" Computes the confidence based", "apiString = str(thisAPI) if variant is not None: apiString = f\"{apiString} '{variant}' variant\"", "this source tree for licensing information. # import os from pathlib import Path", "manpages provider... thats the provider # that we use to clarify results from", "a single, Nonetype value. for variant in variants: if variant is not None:", "the {apiString}\") # Find closest match b/w relevant data and manpages for unix", "this in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you", "if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in", "manpage Algorithm: 1. Compute token-wise similarity b/w query and forum text 2. Compute", "thats the provider # that we use to clarify results from other providers", "Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move to next provider in list logger.info(f\"Processing", "Success!!! 
Found a result in the {apiString}\") # Find closest match b/w relevant", "loop as well if helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable to be", "that help was indeed found helpWasFound = True # We've found help; no", "# We've found help; no need to keep searching break # If we", "break if not helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\", "manpages: logger.info(\"==> Success!!! found relevant manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1] #", "turning it OFF and ON again. ;)\") \\ .to_console() confidence=0.0 return Action(suggested_command=suggested_command, description=description,", "across a problem that even the Internet doesn't have answer to.\\n\") \\ .info()", "{}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching for you, \")", "manpage description for most relevant manpage w.r.t. forum Returns: confidence (float): confidence on", "similarity b/w query and forum text 2. Compute token-wise similarity b/w forum text", "variant case), the variants # list will only contain a single, Nonetype value.", "helpWasFound = True # We've found help; no need to keep searching break", "search variant supported by the current API, query # the data store to", "is not None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! 
Found a result", "confidence def get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command) def post_execute(self, state: State)", "Has no search variants\") variants:List = [None] # For each search variant supported", "we found help, then break out of the outer loop as well if", "for x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self, query,", "there are # no search variants (ie: the singleton variant case), the variants", "confidence based on query, stack-exchange post answer and manpage Algorithm: 1. Compute token-wise", "provider if it isn't supported on the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping", "# Move to next provider in list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants():", "the current API, query # the data store to find the closest matching", "for most relevant manpage w.r.t. forum Returns: confidence (float): confidence on the returned", "= set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens) def", "unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!!", "store to find the closest matching data. 
If there are # no search", "command = manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence =", "We've found help; no need to keep searching break # If we found", "variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr, service=provider,", "state: State) -> Action: return Action(suggested_command=state.command) def post_execute(self, state: State) -> Action: logger.info(\"====================", "result in the {apiString}\") # Find closest match b/w relevant data and manpages", "Rights Reserved. # # See LICENSE.txt file in the root directory # of", "not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue #", "= self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr, service=provider, size=1) if data:", "\\t Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did", "{str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\") variants:List = [None]", "False for provider in apis: # We don't want to process the manpages", "b/w relevant data and manpages for unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult,", "Computes the confidence based on query, stack-exchange post answer and manpage Algorithm: 1.", "(float): confidence on the returned manpage w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content'])", "Success!!! 
found relevant manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially", "is not None: logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant)", "provider 'manpages'\") continue thisAPI:Provider = apis[provider] # Skip this provider if it isn't", "description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks like you have stumbled across a problem", "data. If there are # no search variants (ie: the singleton variant case),", "__init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence,", "supported by the current API, query # the data store to find the", "value. for variant in variants: if variant is not None: logger.info(f\"==> Searching variant", "\\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching for you, \") \\ .append(f\"and", "super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence):", "import Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore from clai.server.agent import", "manpage (str): manpage description for most relevant manpage w.r.t. forum Returns: confidence (float):", "forum text and manpage description 3. Return product of two similarities Args: query", "b/w query and forum text 2. Compute token-wise similarity b/w forum text and", "manpage) confidence = query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self, state: State) ->", "indeed found helpWasFound = True # We've found help; no need to keep", "Compute token-wise similarity b/w query and forum text 2. 
Compute token-wise similarity b/w", "in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want", "if data: apiString = str(thisAPI) if variant is not None: apiString = f\"{apiString}", "in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum, manpage):", "Move to next provider in list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==>", "state variable forum (str): answer text from most relevant stack exchange post w.r.t", "Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet", "manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence = 1.0 logger.info(\"==>", "logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider", "Compute token-wise similarity b/w forum text and manpage description 3. Return product of", "\\ .info() \\ .append(f\"Have you tried turning it OFF and ON again. 
;)\")", "similarities Args: query (str): standard error captured in state variable forum (str): answer", "logger.info(f\"==> Has no search variants\") variants:List = [None] # For each search variant", "forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self,", "size=1, searchType=variant) else: data = self.store.search(state.stderr, service=provider, size=1) if data: apiString = str(thisAPI)", "from pathlib import Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore from", "if not helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append(", "Datastore from clai.server.agent import Agent from clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger", "did little bit of Internet searching for you, \") \\ .append(f\"and found this", "as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store", "to process the manpages provider... thats the provider # that we use to", "provider... thats the provider # that we use to clarify results from other", "most relevant manpage w.r.t. forum Returns: confidence (float): confidence on the returned manpage", "All Rights Reserved. # # See LICENSE.txt file in the root directory #", "set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self,", "description 3. 
Return product of two similarities Args: query (str): standard error captured", "provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move to next provider", "# # Copyright (C) 2020 IBM. All Rights Reserved. # # See LICENSE.txt", "'{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==>", "searching for you, \") \\ .append(f\"and found this in the {thisAPI}:\\n\") \\ .info()", "not helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry.", "= self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity return", "Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code", "len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\" Computes the confidence based on query,", "to next provider in list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has", "only contain a single, Nonetype value. 
for variant in variants: if variant is", "forum, manpage): \"\"\" Computes the confidence based on query, stack-exchange post answer and", "str(thisAPI) if variant is not None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!!", "helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It", "current API, query # the data store to find the closest matching data.", "tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\"", "of the outer loop as well if helpWasFound: break if not helpWasFound: logger.info(\"Failure:", "provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else:", "to keep searching break # If we found help, then break out of", "logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man {}\".format(command) description=Colorize()", "thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\") variants:List = [None] # For each", "product of two similarities Args: query (str): standard error captured in state variable", "matching data. 
If there are # no search variants (ie: the singleton variant", "try: man {}\".format(command)) \\ .to_console() # Mark that help was indeed found helpWasFound", "src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for", "query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity", "even the Internet doesn't have answer to.\\n\") \\ .info() \\ .append(f\"Have you tried", "want to process the manpages provider... thats the provider # that we use", "State) -> Action: return Action(suggested_command=state.command) def post_execute(self, state: State) -> Action: logger.info(\"==================== In", "logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider = apis[provider] # Skip this provider if", "src_tokens = set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in", "search provider 'manpages'\") continue thisAPI:Provider = apis[provider] # Skip this provider if it", "import Datastore from clai.server.agent import Agent from clai.server.command_message import State, Action, NOOP_COMMAND from", "if provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider = apis[provider] #", "results from other providers if provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue", "thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move", "confidence = query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self, state: State) -> Action:", "variant supported by the current API, query # the data store to find", "you want to 
try: man {}\".format(command)) \\ .to_console() # Mark that help was", "search provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move to next", "a problem that even the Internet doesn't have answer to.\\n\") \\ .info() \\", ".to_console() # Mark that help was indeed found helpWasFound = True # We've", "Action: return Action(suggested_command=state.command) def post_execute(self, state: State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute", "state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in apis:", "closest matching data. If there are # no search variants (ie: the singleton", "= manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence = 1.0 logger.info(\"==> Command: {}", "providers if provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider = apis[provider]", ".emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching for you, \") \\ .append(f\"and found", "class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path)", "answer and manpage Algorithm: 1. Compute token-wise similarity b/w query and forum text", "'{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move to next provider in", "by the current API, query # the data store to find the closest", "searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!! 
found", "relevant data and manpages for unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages',", "Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis()", "= query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self, state: State) -> Action: return", "We don't want to process the manpages provider... thats the provider # that", "tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens)", "break # If we found help, then break out of the outer loop", "x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum,", "\"\"\" Computes the confidence based on query, stack-exchange post answer and manpage Algorithm:", "/ len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\" Computes the confidence based on", "manpages for unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if manpages:", "confidence = manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence = 1.0 logger.info(\"==> Command:", "platforms: {str(thisAPI.get_excludes())}\") continue # Move to next provider in list logger.info(f\"Processing search provider", "Args: query (str): standard error captured in state variable forum (str): answer text", "variant in variants: if variant is not None: logger.info(f\"==> Searching variant '{variant}'\") data", "Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore from clai.server.agent import Agent", "to be helpful\") 
logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks like you", "self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self, state: State)", "\\ .append(f\"Have you tried turning it OFF and ON again. ;)\") \\ .to_console()", "= thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\") variants:List = [None] # For", "Skip this provider if it isn't supported on the target OS if not", "confidence = 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) # Set return data", "logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue # Move to next provider in list", "if it isn't supported on the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search", "search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants()", "State, Action, NOOP_COMMAND from clai.server.logger import current_logger as logger class HelpMeAgent(Agent): def __init__(self):", "boosted confidence confidence = 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) # Set", "in variants: if variant is not None: logger.info(f\"==> Searching variant '{variant}'\") data =", "Agent from clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger import current_logger as logger", "for variant in variants: if variant is not None: logger.info(f\"==> Searching variant '{variant}'\")", "Return product of two similarities Args: query (str): standard error captured in state", "self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr, 
service=provider, size=1) if data: apiString", "if helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND", "'manpages'\") continue thisAPI:Provider = apis[provider] # Skip this provider if it isn't supported", "= True # We've found help; no need to keep searching break #", "self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!! found relevant manpages.\") command = manpages['commands'][-1]", "single, Nonetype value. for variant in variants: if variant is not None: logger.info(f\"==>", "IBM. All Rights Reserved. # # See LICENSE.txt file in the root directory", "state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for", "of two similarities Args: query (str): standard error captured in state variable forum", "looks like you have stumbled across a problem that even the Internet doesn't", "service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr, service=provider, size=1) if data: apiString =", "1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man {}\".format(command)", "\\ .append(\"Do you want to try: man {}\".format(command)) \\ .to_console() # Mark that", "apis: # We don't want to process the manpages provider... thats the provider", "variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\") variants:List =", "confidence (float): confidence on the returned manpage w.r.t. 
query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query,", "forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity return confidence def", "clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger import current_logger as logger class HelpMeAgent(Agent):", "Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens", "current_logger as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini')", "{}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound =", "not None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! Found a result in", "of this source tree for licensing information. # import os from pathlib import", "in apis: # We don't want to process the manpages provider... thats the", "searchType=variant) else: data = self.store.search(state.stderr, service=provider, size=1) if data: apiString = str(thisAPI) if", "and manpage Algorithm: 1. 
Compute token-wise similarity b/w query and forum text 2.", "import Colorize from clai.server.searchlib.data import Datastore from clai.server.agent import Agent from clai.server.command_message import", "problem that even the Internet doesn't have answer to.\\n\") \\ .info() \\ .append(f\"Have", "If we found help, then break out of the outer loop as well", "the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want to", "to try: man {}\".format(command)) \\ .to_console() # Mark that help was indeed found", "and manpage description 3. Return product of two similarities Args: query (str): standard", "Returns: confidence (float): confidence on the returned manpage w.r.t. query \"\"\" query_forum_similarity =", "list will only contain a single, Nonetype value. for variant in variants: if", "data and manpages for unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5)", ".append( f\"Sorry. It looks like you have stumbled across a problem that even", "and manpages for unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if", "clarify results from other providers if provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\")", "on platforms: {str(thisAPI.get_excludes())}\") continue # Move to next provider in list logger.info(f\"Processing search", "helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. 
It looks like you have stumbled", "# # See LICENSE.txt file in the root directory # of this source", "import current_logger as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()),", "provider in apis: # We don't want to process the manpages provider... thats", "keep searching break # If we found help, then break out of the", "'{variant}' variant\" logger.info(f\"==> Success!!! Found a result in the {apiString}\") # Find closest", "and forum text 2. Compute token-wise similarity b/w forum text and manpage description", "state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False", "else: data = self.store.search(state.stderr, service=provider, size=1) if data: apiString = str(thisAPI) if variant", "Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr,", "from other providers if provider == \"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider", "have answer to.\\n\") \\ .info() \\ .append(f\"Have you tried turning it OFF and", "'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x", "licensing information. # import os from pathlib import Path from clai.tools.colorize_console import Colorize", "man {}\".format(command)) \\ .to_console() # Mark that help was indeed found helpWasFound =", "in the root directory # of this source tree for licensing information. #", "contain a single, Nonetype value. for variant in variants: if variant is not", "the closest matching data. 
If there are # no search variants (ie: the", "that even the Internet doesn't have answer to.\\n\") \\ .info() \\ .append(f\"Have you", "provider # that we use to clarify results from other providers if provider", "= self.store.search(state.stderr, service=provider, size=1) if data: apiString = str(thisAPI) if variant is not", "In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if", "= f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! Found a result in the {apiString}\") #", "\\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want to try: man {}\".format(command)) \\", "variants: if variant is not None: logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr,", "the data store to find the closest matching data. 
If there are #", "clai.server.searchlib.data import Datastore from clai.server.agent import Agent from clai.server.command_message import State, Action, NOOP_COMMAND", "the Internet doesn't have answer to.\\n\") \\ .info() \\ .append(f\"Have you tried turning", "[None] # For each search variant supported by the current API, query #", "variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\") variants:List = [None] #", "tgt_sequence): src_tokens = set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x", "from clai.server.agent import Agent from clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger import", "# no search variants (ie: the singleton variant case), the variants # list", "out of the outer loop as well if helpWasFound: break if not helpWasFound:", "= set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()])", "query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity", "else: logger.info(f\"==> Has no search variants\") variants:List = [None] # For each search", "variant is not None: logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1,", "Internet searching for you, \") \\ .append(f\"and found this in the {thisAPI}:\\n\") \\", "import State, Action, NOOP_COMMAND from clai.server.logger import current_logger as logger class HelpMeAgent(Agent): def", "the confidence based on query, stack-exchange post answer and manpage Algorithm: 1. Compute", "# Copyright (C) 2020 IBM. All Rights Reserved. 
# # See LICENSE.txt file", "manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence = 1.0 logger.info(\"==> Command: {} \\t", "\"manpages\": logger.info(f\"Skipping search provider 'manpages'\") continue thisAPI:Provider = apis[provider] # Skip this provider", "1. Compute token-wise similarity b/w query and forum text 2. Compute token-wise similarity", "# import os from pathlib import Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data", "you tried turning it OFF and ON again. ;)\") \\ .to_console() confidence=0.0 return", "tried turning it OFF and ON again. ;)\") \\ .to_console() confidence=0.0 return Action(suggested_command=suggested_command,", "\"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity *", "For each search variant supported by the current API, query # the data", "case), the variants # list will only contain a single, Nonetype value. 
for", "bit of Internet searching for you, \") \\ .append(f\"and found this in the", "was indeed found helpWasFound = True # We've found help; no need to", "as well if helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable to be helpful\")", "{str(thisAPI.get_excludes())}\") continue # Move to next provider in list logger.info(f\"Processing search provider '{provider}'\")", "Artificially boosted confidence confidence = 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) #", "thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no", "two similarities Args: query (str): standard error captured in state variable forum (str):", "compute_confidence(self, query, forum, manpage): \"\"\" Computes the confidence based on query, stack-exchange post", "src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens & tgt_tokens) /", "data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data = self.store.search(state.stderr, service=provider, size=1) if", "if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has", "logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks", "continue # Move to next provider in list logger.info(f\"Processing search provider '{provider}'\") if", "b/w forum text and manpage description 3. Return product of two similarities Args:", "it OFF and ON again. 
;)\") \\ .to_console() confidence=0.0 return Action(suggested_command=suggested_command, description=description, confidence=confidence)", "stack-exchange post answer and manpage Algorithm: 1. Compute token-wise similarity b/w query and", "# If we found help, then break out of the outer loop as", "= manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence = 1.0", "from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore from clai.server.agent import Agent from", "Nonetype value. for variant in variants: if variant is not None: logger.info(f\"==> Searching", "self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens", "Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little", "help, then break out of the outer loop as well if helpWasFound: break", "find the closest matching data. If there are # no search variants (ie:", "no search variants (ie: the singleton variant case), the variants # list will", "search variants\") variants:List = [None] # For each search variant supported by the", "clai.server.logger import current_logger as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path =", "manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially boosted confidence confidence", "See LICENSE.txt file in the root directory # of this source tree for", "manpages = self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!! 
found relevant manpages.\") command", "this provider if it isn't supported on the target OS if not thisAPI.can_run_on_this_os():", "next provider in list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search", "if manpages: logger.info(\"==> Success!!! found relevant manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1]", ".append(f\"Have you tried turning it OFF and ON again. ;)\") \\ .to_console() confidence=0.0", "Mark that help was indeed found helpWasFound = True # We've found help;", "query manpage (str): manpage description for most relevant manpage w.r.t. forum Returns: confidence", "the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on", "{}\".format(command)) \\ .to_console() # Mark that help was indeed found helpWasFound = True", "service=provider, size=1) if data: apiString = str(thisAPI) if variant is not None: apiString", "Copyright (C) 2020 IBM. All Rights Reserved. # # See LICENSE.txt file in", "be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks like you have", "os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for", "thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!! found relevant manpages.\")", "source tree for licensing information. 
# import os from pathlib import Path from", "Found a result in the {apiString}\") # Find closest match b/w relevant data", "pathlib import Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore from clai.server.agent", "apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in apis: # We don't want to", "for you, \") \\ .append(f\"and found this in the {thisAPI}:\\n\") \\ .info() \\", "State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command,", "f\"Sorry. It looks like you have stumbled across a problem that even the", "# Find closest match b/w relevant data and manpages for unix searchResult =", "def get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command) def post_execute(self, state: State) ->", "HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def", "process the manpages provider... thats the provider # that we use to clarify", "= Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x in src_sequence.split()])", "-> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code,", "relevant stack exchange post w.r.t query manpage (str): manpage description for most relevant", "well if helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\")", "for licensing information. 
# import os from pathlib import Path from clai.tools.colorize_console import", "root directory # of this source tree for licensing information. # import os", "in list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\")", ".append(f\"and found this in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\", "variants:List = [None] # For each search variant supported by the current API,", "found helpWasFound = True # We've found help; no need to keep searching", "from clai.server.logger import current_logger as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path", "token-wise similarity b/w forum text and manpage description 3. Return product of two", "w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence =", "Action(suggested_command=state.command) def post_execute(self, state: State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand:", "post w.r.t query manpage (str): manpage description for most relevant manpage w.r.t. 
forum", "relevant manpages.\") command = manpages['commands'][-1] confidence = manpages['dists'][-1] # FIXME: Artificially boosted confidence", "(ie: the singleton variant case), the variants # list will only contain a", "no need to keep searching break # If we found help, then break", "============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0':", "if variant is not None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! Found", "help was indeed found helpWasFound = True # We've found help; no need", "not None: logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else:", "forum_manpage_similarity return confidence def get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command) def post_execute(self,", "from clai.server.searchlib.data import Datastore from clai.server.agent import Agent from clai.server.command_message import State, Action,", "logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List =", "if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\") continue", "to find the closest matching data. If there are # no search variants", "confidence on the returned manpage w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity", "need to keep searching break # If we found help, then break out", "the root directory # of this source tree for licensing information. # import", "text 2. 
Compute token-wise similarity b/w forum text and manpage description 3. Return", "data = self.store.search(state.stderr, service=provider, size=1) if data: apiString = str(thisAPI) if variant is", "answer text from most relevant stack exchange post w.r.t query manpage (str): manpage", "def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store = Datastore(inifile_path) def compute_simple_token_similarity(self,", "a result in the {apiString}\") # Find closest match b/w relevant data and", "set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()]) return", "similarity b/w forum text and manpage description 3. Return product of two similarities", "If there are # no search variants (ie: the singleton variant case), the", "returned manpage w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage)", "variants (ie: the singleton variant case), the variants # list will only contain", "helpWasFound: break if not helpWasFound: logger.info(\"Failure: Unable to be helpful\") logger.info(\"============================================================================\") suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT)", "then break out of the outer loop as well if helpWasFound: break if", "variant is not None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! Found a", "\\ .to_console() # Mark that help was indeed found helpWasFound = True #", "on the returned manpage w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity =", "tree for licensing information. 
# import os from pathlib import Path from clai.tools.colorize_console", "each search variant supported by the current API, query # the data store", ".append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want to try: man {}\".format(command)) \\ .to_console()", "answer to.\\n\") \\ .info() \\ .append(f\"Have you tried turning it OFF and ON", "state: State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr:", "= [None] # For each search variant supported by the current API, query", "service='manpages', size=5) if manpages: logger.info(\"==> Success!!! found relevant manpages.\") command = manpages['commands'][-1] confidence", "query # the data store to find the closest matching data. If there", "error captured in state variable forum (str): answer text from most relevant stack", "logger.info(f\"==> Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no search", "compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x in src_sequence.split()]) tgt_tokens = set([x.lower().strip()", "= str(thisAPI) if variant is not None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==>", "Action, NOOP_COMMAND from clai.server.logger import current_logger as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent,", "'0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in apis: # We", "little bit of Internet searching for you, \") \\ .append(f\"and found this in", "\\ .warning() \\ .append(\"Do you want to try: man {}\".format(command)) \\ .to_console() #", "= thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!! 
found relevant", "manpage w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence", "logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data =", "forum text 2. Compute token-wise similarity b/w forum text and manpage description 3.", "post answer and manpage Algorithm: 1. Compute token-wise similarity b/w query and forum", "# Skip this provider if it isn't supported on the target OS if", "based on query, stack-exchange post answer and manpage Algorithm: 1. Compute token-wise similarity", "size=1) if data: apiString = str(thisAPI) if variant is not None: apiString =", "it isn't supported on the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider", "we use to clarify results from other providers if provider == \"manpages\": logger.info(f\"Skipping", "FIXME: Artificially boosted confidence confidence = 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence))", "LICENSE.txt file in the root directory # of this source tree for licensing", "logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__() inifile_path = os.path.join(str(Path(__file__).parent.absolute()), 'config.ini') self.store =", "you have stumbled across a problem that even the Internet doesn't have answer", "os from pathlib import Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import Datastore", "doesn't have answer to.\\n\") \\ .info() \\ .append(f\"Have you tried turning it OFF", "\") \\ .append(f\"and found this in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\", "apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! 
Found a result in the {apiString}\")", "Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in apis: # We don't want", "# the data store to find the closest matching data. If there are", "captured in state variable forum (str): answer text from most relevant stack exchange", "like you have stumbled across a problem that even the Internet doesn't have", "forum (str): answer text from most relevant stack exchange post w.r.t query manpage", "\\ .append(f\"and found this in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning()", "-> Action: return Action(suggested_command=state.command) def post_execute(self, state: State) -> Action: logger.info(\"==================== In Helpme", "directory # of this source tree for licensing information. # import os from", "want to try: man {}\".format(command)) \\ .to_console() # Mark that help was indeed", "to.\\n\") \\ .info() \\ .append(f\"Have you tried turning it OFF and ON again.", "that we use to clarify results from other providers if provider == \"manpages\":", "the provider # that we use to clarify results from other providers if", "token-wise similarity b/w query and forum text 2. Compute token-wise similarity b/w forum", "query (str): standard error captured in state variable forum (str): answer text from", "continue thisAPI:Provider = apis[provider] # Skip this provider if it isn't supported on", "suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching for you,", "use to clarify results from other providers if provider == \"manpages\": logger.info(f\"Skipping search", "return len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\" Computes", "text and manpage description 3. 
Return product of two similarities Args: query (str):", "* forum_manpage_similarity return confidence def get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command) def", "suggested_command=NOOP_COMMAND description=Colorize().emoji(Colorize.EMOJI_ROBOT) \\ .append( f\"Sorry. It looks like you have stumbled across a", "= self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==> Success!!! found relevant manpages.\") command =", "stumbled across a problem that even the Internet doesn't have answer to.\\n\") \\", ".append(\"Do you want to try: man {}\".format(command)) \\ .to_console() # Mark that help", "search variants (ie: the singleton variant case), the variants # list will only", "NOOP_COMMAND from clai.server.logger import current_logger as logger class HelpMeAgent(Agent): def __init__(self): super(HelpMeAgent, self).__init__()", "import os from pathlib import Path from clai.tools.colorize_console import Colorize from clai.server.searchlib.data import", "Command: {} \\t Confidence:{}\".format(command, confidence)) # Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\", "the manpages provider... thats the provider # that we use to clarify results", "on query, stack-exchange post answer and manpage Algorithm: 1. Compute token-wise similarity b/w", "the variants # list will only contain a single, Nonetype value. for variant", "None: apiString = f\"{apiString} '{variant}' variant\" logger.info(f\"==> Success!!! 
Found a result in the", "you, \") \\ .append(f\"and found this in the {thisAPI}:\\n\") \\ .info() \\ .append(thisAPI.get_printable_output(data))", "def post_execute(self, state: State) -> Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError", "have stumbled across a problem that even the Internet doesn't have answer to.\\n\")", "in state variable forum (str): answer text from most relevant stack exchange post", "It looks like you have stumbled across a problem that even the Internet", "w.r.t query manpage (str): manpage description for most relevant manpage w.r.t. forum Returns:", "from clai.server.command_message import State, Action, NOOP_COMMAND from clai.server.logger import current_logger as logger class", "w.r.t. forum Returns: confidence (float): confidence on the returned manpage w.r.t. query \"\"\"", "2. Compute token-wise similarity b/w forum text and manpage description 3. Return product", "found help, then break out of the outer loop as well if helpWasFound:", "data store to find the closest matching data. If there are # no", "# We don't want to process the manpages provider... thats the provider #", "isn't supported on the target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\")", "logger.info(f\"==> Success!!! Found a result in the {apiString}\") # Find closest match b/w", "logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\")", "relevant manpage w.r.t. 
forum Returns: confidence (float): confidence on the returned manpage w.r.t.", "Find closest match b/w relevant data and manpages for unix searchResult = thisAPI.extract_search_result(data)", "Colorize from clai.server.searchlib.data import Datastore from clai.server.agent import Agent from clai.server.command_message import State,", "variants\") variants:List = [None] # For each search variant supported by the current", "if variant is not None: logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider,", "description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit of Internet searching for you, \") \\", "= self.compute_simple_token_similarity(forum[0]['Answer'], manpage) confidence = query_forum_similarity * forum_manpage_similarity return confidence def get_next_action(self, state:", "Action: logger.info(\"==================== In Helpme Bot:post_execute ============================\") logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr))", "& tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\" Computes the confidence", "# See LICENSE.txt file in the root directory # of this source tree", "will only contain a single, Nonetype value. for variant in variants: if variant", "size=5) if manpages: logger.info(\"==> Success!!! found relevant manpages.\") command = manpages['commands'][-1] confidence =", "the singleton variant case), the variants # list will only contain a single,", "self.store = Datastore(inifile_path) def compute_simple_token_similarity(self, src_sequence, tgt_sequence): src_tokens = set([x.lower().strip() for x in", "most relevant stack exchange post w.r.t query manpage (str): manpage description for most", "variants # list will only contain a single, Nonetype value. 
for variant in", ".info() \\ .append(thisAPI.get_printable_output(data)) \\ .warning() \\ .append(\"Do you want to try: man {}\".format(command))", "True # We've found help; no need to keep searching break # If", "for unix searchResult = thisAPI.extract_search_result(data) manpages = self.store.search(searchResult, service='manpages', size=5) if manpages: logger.info(\"==>", "manpage w.r.t. forum Returns: confidence (float): confidence on the returned manpage w.r.t. query", "3. Return product of two similarities Args: query (str): standard error captured in", "OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on platforms: {str(thisAPI.get_excludes())}\")", "confidence confidence = 1.0 logger.info(\"==> Command: {} \\t Confidence:{}\".format(command, confidence)) # Set return", "match b/w relevant data and manpages for unix searchResult = thisAPI.extract_search_result(data) manpages =", "get_next_action(self, state: State) -> Action: return Action(suggested_command=state.command) def post_execute(self, state: State) -> Action:", "of Internet searching for you, \") \\ .append(f\"and found this in the {thisAPI}:\\n\")", "2020 IBM. All Rights Reserved. 
# # See LICENSE.txt file in the root", "provider in list logger.info(f\"Processing search provider '{provider}'\") if thisAPI.has_variants(): logger.info(f\"==> Has search variants:", "standard error captured in state variable forum (str): answer text from most relevant", "target OS if not thisAPI.can_run_on_this_os(): logger.info(f\"Skipping search provider '{provider}'\") logger.info(f\"==> Excluded on platforms:", "confidence)) # Set return data suggested_command=\"man {}\".format(command) description=Colorize() \\ .emoji(Colorize.EMOJI_ROBOT).append(f\"I did little bit", "# that we use to clarify results from other providers if provider ==", "{}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return Action(suggested_command=state.command)", "the returned manpage w.r.t. query \"\"\" query_forum_similarity = self.compute_simple_token_similarity(query, forum[0]['Content']) forum_manpage_similarity = self.compute_simple_token_similarity(forum[0]['Answer'],", "singleton variant case), the variants # list will only contain a single, Nonetype", "API, query # the data store to find the closest matching data. 
If", "None: logger.info(f\"==> Searching variant '{variant}'\") data = self.store.search(state.stderr, service=provider, size=1, searchType=variant) else: data", "len(src_tokens & tgt_tokens) / len(src_tokens) def compute_confidence(self, query, forum, manpage): \"\"\" Computes the", "x in src_sequence.split()]) tgt_tokens = set([x.lower().strip() for x in tgt_sequence.split()]) return len(src_tokens &", "logger.info(\"State:\\n\\tCommand: {}\\n\\tError Code: {}\\n\\tStderr: {}\".format(state.command, state.result_code, state.stderr)) logger.info(\"============================================================================\") if state.result_code == '0': return", "== '0': return Action(suggested_command=state.command) apis:OrderedDict=self.store.get_apis() helpWasFound = False for provider in apis: #", "Has search variants: {str(thisAPI.get_variants())}\") variants:List = thisAPI.get_variants() else: logger.info(f\"==> Has no search variants\")" ]
[ "path: Path to last valid command. children_names: Names of valid commands found at", "# Matching command that expects a subcommand, let's advance to # next level", "commands, e.g. ['orchestrate', 'images'] loader: Object that can load the module containing the", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery", "= os.path.sep.join([path, command]) can_continue = True break else: execute_command(command, parents, loader, arguments[index+1:]) #", "a non-leaf command, e.g.: orchestrate images (instead of orchestrate images create) Let's provide", "at immediate parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) # Was", "commands in the given path. A valid command is either a module, or", "log.info('Command name argument expected.') full_command = ' '.join(parents) else: log.error('Invalid choice: %s', command)", "Would walk looking for the following commands in this order: # 1. images", "that expects a subcommand, let's advance to # next level searching for a", "\"\"\"Executes main command-line entry-point. \"\"\" import inspect import logging import optparse import os", "import os import pkgutil import sys from orchestrate import base from orchestrate import", "if is_package: # Matching command that expects a subcommand, let's advance to #", "from %s', name, loader) module = loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if", ").format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command = command_type() return command def parse_arguments(command,", "command: Attempted command. parents: Upper command levels. 
path: Path to last valid command.", "full_command = ' '.join(parents[:-1]) log.info('Available commands for %s:', full_command) # If no commands", "error, or a non-existent command, e.g. orchestrate images crate (instead of create) orchestrate", "sensible recovery options when no command is found. There is likely a syntax", "with information about recovery options and possible subcommands at the deepest level we", "else: # It was an incomplete command if valid_commands: log.info('Command name argument expected.')", "%(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, )) command = create_command(name, loader)", "valid_commands: log.info('Command name argument expected.') full_command = ' '.join(parents) else: log.error('Invalid choice: %s',", "issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message = ( 'Could not find", "Save reference to modules in every level so that we can provide more", "images (instead of orchestrate images create) Let's provide user with information about recovery", "a package that contains at least one package or a module. This would", "loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if not inspect.isclass(command_type) \\ or not issubclass(command_type,", "path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:], } command = ''", "this file except in compliance with the License. 
# You may obtain a", "Command.run() with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments):", "loader, arguments[index+1:]) # nothing further to do return if not can_continue: # No", "orchestrate images create) Let's provide user with information about recovery options and possible", "parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global", "modules in every level so that we can provide more # information to", "If module could not be loaded or does not contain a subclass of", "commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery options", "Path to package to introspect. \"\"\" commands = [] for module_info in pkgutil.walk_packages([path]):", "command. children_names: Names of valid commands found at immediate parent above. \"\"\" parent_path", "import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why a given", "one valid submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest", "dict() parents = ['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names =", "to # next level searching for a leaf command parents.append(command) path = os.path.sep.join([path,", "# module and will attempt to run Command.run() with the remaining arguments: #", "# You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0", "not contain a subclass of OrchestateCommand. 
\"\"\" log.debug('loading module %s from %s', name,", "%s', command) log.info('Maybe you meant:') else: # It was an incomplete command if", "Command name, e.g. create. parents: Names of parent commands, e.g. ['orchestrate', 'images'] loader:", "ANY KIND, either express or implied. # See the License for the specific", "name=name, module=module.__file__, ) raise ModuleLoaderError(message) command = command_type() return command def parse_arguments(command, name,", "choice: %s', command) full_command = ' '.join(parents[:-1]) log.info('Available commands for %s:', full_command) #", "= dict() parents = ['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names", "commands at the current level, provide suggestions at the level above. if not", "2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the", "break return commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery options when", "orchestrate images crate (instead of create) orchestrate foobar create (foobar in not a", "create test-image-1 --packages=maya,nuke,houdini # Would walk looking for the following commands in this", "log.info('Maybe you meant:') else: # It was an incomplete command if valid_commands: log.info('Command", "module (not submodule) if it contains at least one valid submodule commands.append(module_info.name) break", "path: Path to package to introspect. 
\"\"\" commands = [] for module_info in", "= os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:], } command = '' #", "{ directory: parents[:], } command = '' # Iterate arguments trying to find", "pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why", "log.error('Invalid choice: %s', command) full_command = ' '.join(parents[:-1]) log.info('Available commands for %s:', full_command)", "loader, name, is_package in pkgutil.walk_packages([path]): # Save reference to modules in every level", "parents: Command hierarchy. arguments: Entire command-line arguments. Returns: A tuple of options and", "crate (instead of create) orchestrate foobar create (foobar in not a command) User", "parser.parse_args(arguments) return options, arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes the given command.", "options, arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes the given command. Args: name:", "License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "name: Module name. parents: Command hierarchy. arguments: Entire command-line arguments. Returns: A tuple", "# Was it a syntax error? if command not in children_names[parent_path]: log.error('Invalid choice:", "Was it a syntax error? if command not in children_names[parent_path]: log.error('Invalid choice: %s',", "in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command", "it into options and arguments. Args: command: OrchestrateCommand instance. name: Module name. parents:", "configure loggers. 
# pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide", "defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments", "loader): \"\"\"Returns a command instance from the given module loader. Args: name: Module", "at least one valid submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents, path,", "= { directory: parents[:], } command = '' # Iterate arguments trying to", "create. parents: Names of parent commands, e.g. ['orchestrate', 'images'] loader: Object that can", "# When it reaches \"create\", it would load the orchestrate.commands.image.create # module and", "copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "introspect. \"\"\" commands = [] for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name)", "command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, )) command", "the License. \"\"\"Executes main command-line entry-point. \"\"\" import inspect import logging import optparse", "do return if not can_continue: # No matching command at current level. Don't", "OF ANY KIND, either express or implied. # See the License for the", "loader. 
Raises: ModuleLoaderError: If module could not be loaded or does not contain", "%s', name, loader) module = loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if not", "arguments: Arguments relevant to the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents='", "user with information about recovery options and possible subcommands at the deepest level", "sys.argv. \"\"\" if arguments is None: arguments = sys.argv[1:] loaders = dict() parents", "parents=' '.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults)", "We need to import this module in order to configure loggers. # pylint:", "log.info('Available commands for %s:', full_command) # If no commands at the current level,", "i.e. sys.argv. \"\"\" if arguments is None: arguments = sys.argv[1:] loaders = dict()", "to find a matching command. module_path = os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name)", "This would effectively trim empty command packages. Args: path: Path to package to", "about recovery options and possible subcommands at the deepest level we managed to", "we managed to get. Args: command: Attempted command. parents: Upper command levels. path:", "module and will attempt to run Command.run() with the remaining arguments: # test-image-1", "Command hierarchy. arguments: Entire command-line arguments. Returns: A tuple of options and arguments.", "will attempt to run Command.run() with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for", "current level, provide suggestions at the level above. if not valid_commands: valid_commands =", "the following commands in this order: # 1. images # 2. create #", "arguments. 
If none specified, it uses the default provided from the command-line, i.e.", "parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments)", "= find_valid_commands(path) # Was it a syntax error? if command not in children_names[parent_path]:", "\"\"\" if arguments is None: arguments = sys.argv[1:] loaders = dict() parents =", "except (AttributeError, TypeError): message = ( 'Could not find implementation of OrchestrateCommand {name}", "from the given module loader. Args: name: Module name. loader: Module loader. Raises:", "for loader, name, is_package in pkgutil.walk_packages([path]): # Save reference to modules in every", "ModuleLoaderError(message) command = command_type() return command def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line", "arguments. 
\"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents='", "' '.join(parents[:-1]) log.info('Available commands for %s:', full_command) # If no commands at the", "<reponame>muskanmahajan37/solutions-cloud-orchestrate # python3 # Copyright 2020 Google LLC # # Licensed under the", "is_package in pkgutil.walk_packages([path]): # Save reference to modules in every level so that", "= os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) # Execute command if we reach", "name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of", "break else: execute_command(command, parents, loader, arguments[index+1:]) # nothing further to do return if", "# For example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would walk looking", "not be loaded or does not contain a subclass of OrchestateCommand. \"\"\" log.debug('loading", "parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list", "%s:', full_command) # If no commands at the current level, provide suggestions at", "# Save reference to modules in every level so that we can provide", "parents, loader, arguments[index+1:]) # nothing further to do return if not can_continue: #", "LLC # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "parent commands, e.g. ['orchestrate', 'images'] loader: Object that can load the module containing", "options and possible subcommands at the deepest level we managed to get. Args:", "in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you meant:') else: # It was", "level, provide suggestions at the level above. 
if not valid_commands: valid_commands = find_valid_commands(parent_path)", "Args: path: Path to package to introspect. \"\"\" commands = [] for module_info", "to introspect. \"\"\" commands = [] for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg:", "provide suggestions at the level above. if not valid_commands: valid_commands = find_valid_commands(parent_path) for", "commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery options when no command", "details on why a given module could not be loaded. \"\"\" pass def", "command instance from the given module loader. Args: name: Module name. loader: Module", "arguments. # For example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would walk", "if we reach a submodule with a matching name if command == name:", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "submodule with a matching name, we try to load a command # instance", "for the following commands in this order: # 1. images # 2. create", "to run Command.run() with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index, command", "= os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it a syntax error? if command", "at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "orchestrate import base from orchestrate import utils # We need to import this", "no command is found. There is likely a syntax error, or a non-existent", "\"\"\" import inspect import logging import optparse import os import pkgutil import sys", "main command-line entry-point. \"\"\" import inspect import logging import optparse import os import", "need to import this module in order to configure loggers. 
# pylint: disable=unused-import", "not in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you meant:') else: # It", "we can provide more # information to user in case we fail to", "to user in case we fail to find a matching command. module_path =", "command is found. There is likely a syntax error, or a non-existent command,", "' {module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command = command_type() return command", "parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and splits it into options and arguments.", "attempt to run Command.run() with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index,", "\"\"\"Suggest sensible recovery options when no command is found. There is likely a", "License. \"\"\"Executes main command-line entry-point. \"\"\" import inspect import logging import optparse import", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "run Command.run() with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index, command in", "import base from orchestrate import utils # We need to import this module", "least one package or a module. This would effectively trim empty command packages.", "above. if not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s',", "could have typed a non-leaf command, e.g.: orchestrate images (instead of orchestrate images", "searching for a leaf command parents.append(command) path = os.path.sep.join([path, command]) can_continue = True", "import utils # We need to import this module in order to configure", "Args: command: Attempted command. parents: Upper command levels. path: Path to last valid", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "command-line and splits it into options and arguments. 
Args: command: OrchestrateCommand instance. name:", "return options, arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes the given command. Args:", "loggers. # pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details", "\"\"\"Provide details on why a given module could not be loaded. \"\"\" pass", "If none specified, it uses the default provided from the command-line, i.e. sys.argv.", "= ['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory:", "recovery options when no command is found. There is likely a syntax error,", "required by applicable law or agreed to in writing, software # distributed under", "a matching command by name. # If we find a submodule with a", "that can load the module containing the command. arguments: Arguments relevant to the", "with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments): command", "in enumerate(arguments): command = command.replace('-', '_') children_names[path] = [] can_continue = False for", "applicable law or agreed to in writing, software # distributed under the License", "Args: arguments: Command arguments. If none specified, it uses the default provided from", "\"\"\" pass def create_command(name, loader): \"\"\"Returns a command instance from the given module", "return commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery options when no", "next level searching for a leaf command parents.append(command) path = os.path.sep.join([path, command]) can_continue", "= loader children_names[path].append(name) # Execute command if we reach a submodule with a", "or a package that contains at least one package or a module. This", "or a module. This would effectively trim empty command packages. 
Args: path: Path", "for _ in pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if it contains at", "valid_commands = find_valid_commands(path) # Was it a syntax error? if command not in", "a module, or a package that contains at least one package or a", "%(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, )) command = create_command(name, loader) options,", "least one valid submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents, path, children_names):", "a subclass of OrchestateCommand. \"\"\" log.debug('loading module %s from %s', name, loader) module", "e.g. create. parents: Names of parent commands, e.g. ['orchestrate', 'images'] loader: Object that", "the given command. Args: name: Command name, e.g. create. parents: Names of parent", "= [] can_continue = False for loader, name, is_package in pkgutil.walk_packages([path]): # Save", "or agreed to in writing, software # distributed under the License is distributed", "if it contains at least one valid submodule commands.append(module_info.name) break return commands def", "= ' '.join(parents) else: log.error('Invalid choice: %s', command) full_command = ' '.join(parents[:-1]) log.info('Available", "parents[:], } command = '' # Iterate arguments trying to find a matching", "at the level above. 
if not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in", "the remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments): command =", "optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options, arguments", "test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments): command = command.replace('-', '_') children_names[path] =", "for index, command in enumerate(arguments): command = command.replace('-', '_') children_names[path] = [] can_continue", "# It was an incomplete command if valid_commands: log.info('Command name argument expected.') full_command", "Iterate arguments trying to find a matching command by name. # If we", "\"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser =", "else: execute_command(command, parents, loader, arguments[index+1:]) # nothing further to do return if not", "command. Args: name: Command name, e.g. create. parents: Names of parent commands, e.g.", "orchestrate import utils # We need to import this module in order to", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "def create_command(name, loader): \"\"\"Returns a command instance from the given module loader. Args:", "module. This would effectively trim empty command packages. Args: path: Path to package", "typed a non-leaf command, e.g.: orchestrate images (instead of orchestrate images create) Let's", "possible subcommands at the deepest level we managed to get. Args: command: Attempted", "likely a syntax error, or a non-existent command, e.g. 
orchestrate images crate (instead", "not find implementation of OrchestrateCommand {name} in module' ' {module}' ).format( name=name, module=module.__file__,", "test-image-1 --packages=maya,nuke,houdini # Would walk looking for the following commands in this order:", "in this order: # 1. images # 2. create # When it reaches", "at the current level, provide suggestions at the level above. if not valid_commands:", "to import this module in order to configure loggers. # pylint: disable=unused-import import", "a matching name if command == name: if is_package: # Matching command that", "find a submodule with a matching name, we try to load a command", "to the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments,", "There is likely a syntax error, or a non-existent command, e.g. orchestrate images", "pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for _ in", "= sys.argv[1:] loaders = dict() parents = ['orchestrate'] directory = os.path.dirname(__file__) path =", "module_path = os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) # Execute command if we", "['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:],", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "[] can_continue = False for loader, name, is_package in pkgutil.walk_packages([path]): # Save reference", "writing, software # distributed under the License is distributed on an \"AS IS\"", "loaded. 
\"\"\" pass def create_command(name, loader): \"\"\"Returns a command instance from the given", "module could not be loaded or does not contain a subclass of OrchestateCommand.", "valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def", "meant:') else: # It was an incomplete command if valid_commands: log.info('Command name argument", "command_type = getattr(module, 'Command') if not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise", "commands in this order: # 1. images # 2. create # When it", "License. # You may obtain a copy of the License at # #", "tuple of options and arguments. \"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command}", "find a matching command. module_path = os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) #", "command. arguments: Arguments relevant to the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict(", "base from orchestrate import utils # We need to import this module in", "orchestrate.commands.image.create # module and will attempt to run Command.run() with the remaining arguments:", "command) full_command = ' '.join(parents[:-1]) log.info('Available commands for %s:', full_command) # If no", "module=module.__file__, ) raise ModuleLoaderError(message) command = command_type() return command def parse_arguments(command, name, parents,", "create # When it reaches \"create\", it would load the orchestrate.commands.image.create # module", ")) command = create_command(name, loader) options, arguments = parse_arguments(command, name, parents, arguments) if", "of options and arguments. \"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS]", "from orchestrate import utils # We need to import this module in order", "compliance with the License. 
# You may obtain a copy of the License", "message = ( 'Could not find implementation of OrchestrateCommand {name} in module' '", "can_continue: # No matching command at current level. Don't look further. break suggest_recovery_options(command,", "in not a command) User could have typed a non-leaf command, e.g.: orchestrate", "to last valid command. children_names: Names of valid commands found at immediate parent", "uses the default provided from the command-line, i.e. sys.argv. \"\"\" if arguments is", "def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and splits it into options and", "of OrchestateCommand. \"\"\" log.debug('loading module %s from %s', name, loader) module = loader.find_module(name).load_module(name)", "not be loaded. \"\"\" pass def create_command(name, loader): \"\"\"Returns a command instance from", "= dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options)", "arguments. Args: command: OrchestrateCommand instance. name: Module name. parents: Command hierarchy. arguments: Entire", "description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group =", "loader) module = loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if not inspect.isclass(command_type) \\", "arguments. Returns: A tuple of options and arguments. 
\"\"\" log.debug('Parsing arguments') usage =", "advance to # next level searching for a leaf command parents.append(command) path =", "'.join(parents), command=name, arguments=arguments, )) command = create_command(name, loader) options, arguments = parse_arguments(command, name,", "given module loader. Args: name: Module name. loader: Module loader. Raises: ModuleLoaderError: If", "options and arguments. \"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS]", "directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:], }", "children_names[path].append(name) # Execute command if we reach a submodule with a matching name", "and possible subcommands at the deepest level we managed to get. Args: command:", "could not be loaded. \"\"\" pass def create_command(name, loader): \"\"\"Returns a command instance", "log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, )) command = create_command(name,", "in case we fail to find a matching command. module_path = os.path.sep.join([path, name])", "\"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, )) command =", "( 'Could not find implementation of OrchestrateCommand {name} in module' ' {module}' ).format(", "with the remaining arguments. # For example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini", "to package to introspect. \"\"\" commands = [] for module_info in pkgutil.walk_packages([path]): if", "options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options, arguments def execute_command(name,", "syntax error? 
if command not in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you", "try: command_type = getattr(module, 'Command') if not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand):", "# Add module (not submodule) if it contains at least one valid submodule", "\"create\", it would load the orchestrate.commands.image.create # module and will attempt to run", "not use this file except in compliance with the License. # You may", "command-line arguments. Returns: A tuple of options and arguments. \"\"\" log.debug('Parsing arguments') usage", "found at immediate parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) #", "the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, ))", "= loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if not inspect.isclass(command_type) \\ or not", "Args: name: Command name, e.g. create. parents: Names of parent commands, e.g. ['orchestrate',", "arguments): \"\"\"Executes the given command. Args: name: Command name, e.g. create. parents: Names", "current level. Don't look further. break suggest_recovery_options(command, parents, path, children_names) if __name__ ==", "empty command packages. Args: path: Path to package to introspect. \"\"\" commands =", "level we managed to get. Args: command: Attempted command. parents: Upper command levels.", "trying to find a matching command by name. # If we find a", "it with the remaining arguments. # For example: # orchestrate images create test-image-1", "%s from %s', name, loader) module = loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command')", "name. 
# If we find a submodule with a matching name, we try", "License, Version 2.0 (the \"License\"); # you may not use this file except", "command) log.info('Maybe you meant:') else: # It was an incomplete command if valid_commands:", "Module name. parents: Command hierarchy. arguments: Entire command-line arguments. Returns: A tuple of", "os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it a syntax error? if command not", "under the License. \"\"\"Executes main command-line entry-point. \"\"\" import inspect import logging import", "orchestrate images (instead of orchestrate images create) Let's provide user with information about", "command # instance from the submodule and execute it with the remaining arguments.", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add", "e.g.: orchestrate images (instead of orchestrate images create) Let's provide user with information", "arguments = parser.parse_args(arguments) return options, arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes the", "order: # 1. images # 2. create # When it reaches \"create\", it", "level above. if not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands: log.info('", "= optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options')", "Execute command if we reach a submodule with a matching name if command", "2. 
create # When it reaches \"create\", it would load the orchestrate.commands.image.create #", "to do return if not can_continue: # No matching command at current level.", "expects a subcommand, let's advance to # next level searching for a leaf", "let's advance to # next level searching for a leaf command parents.append(command) path", "if not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s', valid_command.replace('_',", "command, e.g. orchestrate images crate (instead of create) orchestrate foobar create (foobar in", "a matching command. module_path = os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) # Execute", "# # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "it uses the default provided from the command-line, i.e. sys.argv. \"\"\" if arguments", "command = command_type() return command def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and", "= find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs", "be loaded or does not contain a subclass of OrchestateCommand. \"\"\" log.debug('loading module", "https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "# you may not use this file except in compliance with the License.", "children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you meant:') else: # It was an", "level so that we can provide more # information to user in case", "orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why a given module", "return if not can_continue: # No matching command at current level. 
Don't look", "nothing further to do return if not can_continue: # No matching command at", "we reach a submodule with a matching name if command == name: if", "so that we can provide more # information to user in case we", "valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments.", "agreed to in writing, software # distributed under the License is distributed on", "commands = [] for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path", "get. Args: command: Attempted command. parents: Upper command levels. path: Path to last", "parents: Names of parent commands, e.g. ['orchestrate', 'images'] loader: Object that can load", "of valid commands in the given path. A valid command is either a", "'images'] loader: Object that can load the module containing the command. arguments: Arguments", "module' ' {module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command = command_type() return", "command. parents: Upper command levels. path: Path to last valid command. children_names: Names", "obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless", "(the \"License\"); # you may not use this file except in compliance with", "not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-'))", "the current level, provide suggestions at the level above. 
if not valid_commands: valid_commands", "command not in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you meant:') else: #", "logging import optparse import os import pkgutil import sys from orchestrate import base", "arguments = sys.argv[1:] loaders = dict() parents = ['orchestrate'] directory = os.path.dirname(__file__) path", "user in case we fail to find a matching command. module_path = os.path.sep.join([path,", "information to user in case we fail to find a matching command. module_path", "further to do return if not can_continue: # No matching command at current", "example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would walk looking for the", "module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for", "# Unless required by applicable law or agreed to in writing, software #", "# If we find a submodule with a matching name, we try to", "parents.append(command) path = os.path.sep.join([path, command]) can_continue = True break else: execute_command(command, parents, loader,", "by applicable law or agreed to in writing, software # distributed under the", "parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of valid", "(foobar in not a command) User could have typed a non-leaf command, e.g.:", "Raises: ModuleLoaderError: If module could not be loaded or does not contain a", "the submodule and execute it with the remaining arguments. 
# For example: #", ") raise ModuleLoaderError(message) command = command_type() return command def parse_arguments(command, name, parents, arguments):", "TypeError): message = ( 'Could not find implementation of OrchestrateCommand {name} in module'", "if not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError):", "name. parents: Command hierarchy. arguments: Entire command-line arguments. Returns: A tuple of options", "dict( parents=' '.join(parents), command=name, arguments=arguments, )) command = create_command(name, loader) options, arguments =", "arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of valid commands", "'-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments. If none specified, it", "name, we try to load a command # instance from the submodule and", "packages. Args: path: Path to package to introspect. \"\"\" commands = [] for", "immediate parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it", "%s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments. If none", "sys.argv[1:] loaders = dict() parents = ['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory,", "valid commands in the given path. A valid command is either a module,", "implementation of OrchestrateCommand {name} in module' ' {module}' ).format( name=name, module=module.__file__, ) raise", "children_names = { directory: parents[:], } command = '' # Iterate arguments trying", "['orchestrate', 'images'] loader: Object that can load the module containing the command. arguments:", "load the module containing the command. arguments: Arguments relevant to the command. 
\"\"\"", "not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message", "full_command = ' '.join(parents) else: log.error('Invalid choice: %s', command) full_command = ' '.join(parents[:-1])", "a command # instance from the submodule and execute it with the remaining", "you meant:') else: # It was an incomplete command if valid_commands: log.info('Command name", "matching command by name. # If we find a submodule with a matching", "file except in compliance with the License. # You may obtain a copy", "module in order to configure loggers. # pylint: disable=unused-import import orchestrate.logger log =", "remaining arguments: # test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments): command = command.replace('-',", "the given module loader. Args: name: Module name. loader: Module loader. Raises: ModuleLoaderError:", "{command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults", "loader. Args: name: Module name. loader: Module loader. Raises: ModuleLoaderError: If module could", "options and arguments. Args: command: OrchestrateCommand instance. name: Module name. parents: Command hierarchy.", "'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options, arguments def", "from the command-line, i.e. sys.argv. \"\"\" if arguments is None: arguments = sys.argv[1:]", "a non-existent command, e.g. 
orchestrate images crate (instead of create) orchestrate foobar create", "[OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults =", "loaded or does not contain a subclass of OrchestateCommand. \"\"\" log.debug('loading module %s", "a module. This would effectively trim empty command packages. Args: path: Path to", "License for the specific language governing permissions and # limitations under the License.", "expected.') full_command = ' '.join(parents) else: log.error('Invalid choice: %s', command) full_command = '", "\"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents),", "managed to get. Args: command: Attempted command. parents: Upper command levels. path: Path", "# python3 # Copyright 2020 Google LLC # # Licensed under the Apache", "'_') children_names[path] = [] can_continue = False for loader, name, is_package in pkgutil.walk_packages([path]):", "parents, path, children_names): \"\"\"Suggest sensible recovery options when no command is found. There", "and arguments. Args: command: OrchestrateCommand instance. name: Module name. parents: Command hierarchy. arguments:", "# limitations under the License. \"\"\"Executes main command-line entry-point. \"\"\" import inspect import", "Names of parent commands, e.g. 
['orchestrate', 'images'] loader: Object that can load the", "# pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on", "to in writing, software # distributed under the License is distributed on an", "in pkgutil.walk_packages([path]): # Save reference to modules in every level so that we", "= create_command(name, loader) options, arguments = parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG)", "have typed a non-leaf command, e.g.: orchestrate images (instead of orchestrate images create)", "(AttributeError, TypeError): message = ( 'Could not find implementation of OrchestrateCommand {name} in", "logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of valid commands in the given", "# next level searching for a leaf command parents.append(command) path = os.path.sep.join([path, command])", "command def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and splits it into options", "error? if command not in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you meant:')", "implied. 
# See the License for the specific language governing permissions and #", "= ' '.join(parents[:-1]) log.info('Available commands for %s:', full_command) # If no commands at", "\"License\"); # you may not use this file except in compliance with the", "[] for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path,", "# information to user in case we fail to find a matching command.", "return command def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and splits it into", "{parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage)", "utils # We need to import this module in order to configure loggers.", "command packages. Args: path: Path to package to introspect. \"\"\" commands = []", "is found. There is likely a syntax error, or a non-existent command, e.g.", "or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message = ( 'Could", "orchestrate foobar create (foobar in not a command) User could have typed a", "load the orchestrate.commands.image.create # module and will attempt to run Command.run() with the", "command if valid_commands: log.info('Command name argument expected.') full_command = ' '.join(parents) else: log.error('Invalid", "else: submodule_path = os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add module (not", "for a leaf command parents.append(command) path = os.path.sep.join([path, command]) can_continue = True break", "loader: Object that can load the module containing the command. 
arguments: Arguments relevant", "None: arguments = sys.argv[1:] loaders = dict() parents = ['orchestrate'] directory = os.path.dirname(__file__)", "from the submodule and execute it with the remaining arguments. # For example:", "a submodule with a matching name if command == name: if is_package: #", "command.replace('-', '_') children_names[path] = [] can_continue = False for loader, name, is_package in", "contains at least one valid submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents,", "command]) can_continue = True break else: execute_command(command, parents, loader, arguments[index+1:]) # nothing further", "pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if it contains at least one valid", "if arguments is None: arguments = sys.argv[1:] loaders = dict() parents = ['orchestrate']", "to find a matching command by name. # If we find a submodule", "either a module, or a package that contains at least one package or", "# orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would walk looking for the following", "raise TypeError() except (AttributeError, TypeError): message = ( 'Could not find implementation of", "or implied. # See the License for the specific language governing permissions and", "no commands at the current level, provide suggestions at the level above. if", "commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add module", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "path. A valid command is either a module, or a package that contains", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why a given module could not be", "levels. path: Path to last valid command. children_names: Names of valid commands found", "images # 2. create # When it reaches \"create\", it would load the", "and will attempt to run Command.run() with the remaining arguments: # test-image-1 --packages=maya,nuke,houdini", "e.g. orchestrate images crate (instead of create) orchestrate foobar create (foobar in not", "in writing, software # distributed under the License is distributed on an \"AS", "common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options, arguments def execute_command(name, parents,", "# Execute command if we reach a submodule with a matching name if", "Don't look further. break suggest_recovery_options(command, parents, path, children_names) if __name__ == '__main__': main()", "command=name, arguments=arguments, )) command = create_command(name, loader) options, arguments = parse_arguments(command, name, parents,", "command if we reach a submodule with a matching name if command ==", "arguments: # test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments): command = command.replace('-', '_')", "find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line.", "options, arguments = parser.parse_args(arguments) return options, arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes", "specified, it uses the default provided from the command-line, i.e. sys.argv. 
\"\"\" if", "in every level so that we can provide more # information to user", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "the level above. if not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands:", "commands found at immediate parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path)", "given command. Args: name: Command name, e.g. create. parents: Names of parent commands,", "arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes the given command. Args: name: Command", "reaches \"create\", it would load the orchestrate.commands.image.create # module and will attempt to", "arguments): \"\"\"Parse command-line and splits it into options and arguments. Args: command: OrchestrateCommand", "could not be loaded or does not contain a subclass of OrchestateCommand. \"\"\"", "command = '' # Iterate arguments trying to find a matching command by", "os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:], } command = '' # Iterate", "we fail to find a matching command. module_path = os.path.sep.join([path, name]) loaders[module_path] =", "module, or a package that contains at least one package or a module.", "{name} in module' ' {module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command =", "the deepest level we managed to get. Args: command: Attempted command. parents: Upper", "execute_command(command, parents, loader, arguments[index+1:]) # nothing further to do return if not can_continue:", "provide more # information to user in case we fail to find a", "not can_continue: # No matching command at current level. Don't look further. break", "# No matching command at current level. Don't look further. 
break suggest_recovery_options(command, parents,", "OrchestrateCommand instance. name: Module name. parents: Command hierarchy. arguments: Entire command-line arguments. Returns:", "'.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults)", "is either a module, or a package that contains at least one package", "command levels. path: Path to last valid command. children_names: Names of valid commands", "the orchestrate.commands.image.create # module and will attempt to run Command.run() with the remaining", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "in pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if it contains at least one", "you may not use this file except in compliance with the License. #", "\"\"\"Returns list of valid commands in the given path. A valid command is", "options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of valid commands in the", "'Command') if not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError,", "import inspect import logging import optparse import os import pkgutil import sys from", "arguments trying to find a matching command by name. # If we find", "# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "a matching name, we try to load a command # instance from the", "Google LLC # # Licensed under the Apache License, Version 2.0 (the \"License\");", "effectively trim empty command packages. Args: path: Path to package to introspect. 
\"\"\"", "OrchestrateCommand {name} in module' ' {module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command", "command, e.g.: orchestrate images (instead of orchestrate images create) Let's provide user with", "main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments. If none specified, it uses the", "is None: arguments = sys.argv[1:] loaders = dict() parents = ['orchestrate'] directory =", "pkgutil import sys from orchestrate import base from orchestrate import utils # We", "# Copyright 2020 Google LLC # # Licensed under the Apache License, Version", "def find_valid_commands(path): \"\"\"Returns list of valid commands in the given path. A valid", "'.join(parents) else: log.error('Invalid choice: %s', command) full_command = ' '.join(parents[:-1]) log.info('Available commands for", "with a matching name, we try to load a command # instance from", "at current level. Don't look further. break suggest_recovery_options(command, parents, path, children_names) if __name__", "Args: name: Module name. loader: Module loader. Raises: ModuleLoaderError: If module could not", "the module containing the command. arguments: Arguments relevant to the command. \"\"\" log.debug('execute", "contain a subclass of OrchestateCommand. \"\"\" log.debug('loading module %s from %s', name, loader)", "a syntax error, or a non-existent command, e.g. orchestrate images crate (instead of", "name if command == name: if is_package: # Matching command that expects a", "to load a command # instance from the submodule and execute it with", "use this file except in compliance with the License. # You may obtain", "Arguments relevant to the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents),", "command == name: if is_package: # Matching command that expects a subcommand, let's", "execute_command(name, parents, loader, arguments): \"\"\"Executes the given command. 
Args: name: Command name, e.g.", "it would load the orchestrate.commands.image.create # module and will attempt to run Command.run()", "it a syntax error? if command not in children_names[parent_path]: log.error('Invalid choice: %s', command)", "not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message = ( 'Could not", "for the specific language governing permissions and # limitations under the License. \"\"\"Executes", "index, command in enumerate(arguments): command = command.replace('-', '_') children_names[path] = [] can_continue =", "and arguments. \"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format(", "to modules in every level so that we can provide more # information", "= getattr(module, 'Command') if not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError()", "name: if is_package: # Matching command that expects a subcommand, let's advance to", "specific language governing permissions and # limitations under the License. \"\"\"Executes main command-line", "# instance from the submodule and execute it with the remaining arguments. #", "following commands in this order: # 1. images # 2. 
create # When", "a subcommand, let's advance to # next level searching for a leaf command", "You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 #", "log.debug('loading module %s from %s', name, loader) module = loader.find_module(name).load_module(name) try: command_type =", "in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for _", "arguments = parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path):", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "enumerate(arguments): command = command.replace('-', '_') children_names[path] = [] can_continue = False for loader,", "= parser.parse_args(arguments) return options, arguments def execute_command(name, parents, loader, arguments): \"\"\"Executes the given", "= optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options,", "optparse import os import pkgutil import sys from orchestrate import base from orchestrate", "optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options())", "of create) orchestrate foobar create (foobar in not a command) User could have", "information about recovery options and possible subcommands at the deepest level we managed", "%(arguments)s', dict( parents=' '.join(parents), command=name, arguments=arguments, )) command = create_command(name, loader) options, 
arguments", "find_valid_commands(path) # Was it a syntax error? if command not in children_names[parent_path]: log.error('Invalid", "ModuleLoaderError: If module could not be loaded or does not contain a subclass", "parents: Upper command levels. path: Path to last valid command. children_names: Names of", "If we find a submodule with a matching name, we try to load", "= ( 'Could not find implementation of OrchestrateCommand {name} in module' ' {module}'", "No matching command at current level. Don't look further. break suggest_recovery_options(command, parents, path,", "2.0 (the \"License\"); # you may not use this file except in compliance", "command = create_command(name, loader) options, arguments = parse_arguments(command, name, parents, arguments) if options.verbose:", "matching name, we try to load a command # instance from the submodule", "options when no command is found. There is likely a syntax error, or", "import this module in order to configure loggers. # pylint: disable=unused-import import orchestrate.logger", "arguments: Command arguments. If none specified, it uses the default provided from the", "command-line entry-point. \"\"\" import inspect import logging import optparse import os import pkgutil", "submodule_path = os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add module (not submodule)", "permissions and # limitations under the License. \"\"\"Executes main command-line entry-point. \"\"\" import", "'' # Iterate arguments trying to find a matching command by name. #", "# nothing further to do return if not can_continue: # No matching command", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "for valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. 
Args:", "reach a submodule with a matching name if command == name: if is_package:", "subcommand, let's advance to # next level searching for a leaf command parents.append(command)", "reference to modules in every level so that we can provide more #", "recovery options and possible subcommands at the deepest level we managed to get.", "remaining arguments. # For example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would", "Let's provide user with information about recovery options and possible subcommands at the", "when no command is found. There is likely a syntax error, or a", "import sys from orchestrate import base from orchestrate import utils # We need", "# # Unless required by applicable law or agreed to in writing, software", "images create) Let's provide user with information about recovery options and possible subcommands", "valid_commands = find_valid_commands(parent_path) for valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None):", "express or implied. # See the License for the specific language governing permissions", "be loaded. \"\"\" pass def create_command(name, loader): \"\"\"Returns a command instance from the", "splits it into options and arguments. Args: command: OrchestrateCommand instance. name: Module name.", "children_names: Names of valid commands found at immediate parent above. 
\"\"\" parent_path =", "if command not in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe you meant:') else:", "common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return", "from orchestrate import base from orchestrate import utils # We need to import", "in module' ' {module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command = command_type()", "find implementation of OrchestrateCommand {name} in module' ' {module}' ).format( name=name, module=module.__file__, )", "parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options, arguments def execute_command(name, parents, loader, arguments):", "create_command(name, loader) options, arguments = parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options,", "parents=' '.join(parents), command=name, arguments=arguments, )) command = create_command(name, loader) options, arguments = parse_arguments(command,", "= False for loader, name, is_package in pkgutil.walk_packages([path]): # Save reference to modules", "It was an incomplete command if valid_commands: log.info('Command name argument expected.') full_command =", "arguments[index+1:]) # nothing further to do return if not can_continue: # No matching", "command-line. Args: arguments: Command arguments. If none specified, it uses the default provided", "command_type() return command def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and splits it", "either express or implied. 
# See the License for the specific language governing", "arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description,", "deepest level we managed to get. Args: command: Attempted command. parents: Upper command", "syntax error, or a non-existent command, e.g. orchestrate images crate (instead of create)", "trim empty command packages. Args: path: Path to package to introspect. \"\"\" commands", "above. \"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it a syntax", "dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options,", "this module in order to configure loggers. # pylint: disable=unused-import import orchestrate.logger log", "walk looking for the following commands in this order: # 1. images #", ") parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser,", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "instance from the given module loader. Args: name: Module name. loader: Module loader.", "Module name. loader: Module loader. Raises: ModuleLoaderError: If module could not be loaded", "last valid command. children_names: Names of valid commands found at immediate parent above.", "none specified, it uses the default provided from the command-line, i.e. sys.argv. 
\"\"\"", "= '' # Iterate arguments trying to find a matching command by name.", "\"\"\"Executes the given command. Args: name: Command name, e.g. create. parents: Names of", "arguments: Entire command-line arguments. Returns: A tuple of options and arguments. \"\"\" log.debug('Parsing", "it reaches \"create\", it would load the orchestrate.commands.image.create # module and will attempt", "loaders[module_path] = loader children_names[path].append(name) # Execute command if we reach a submodule with", "= True break else: execute_command(command, parents, loader, arguments[index+1:]) # nothing further to do", "# Iterate arguments trying to find a matching command by name. # If", "disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why a", "find a matching command by name. # If we find a submodule with", "try to load a command # instance from the submodule and execute it", "execute it with the remaining arguments. # For example: # orchestrate images create", "a command) User could have typed a non-leaf command, e.g.: orchestrate images (instead", "the remaining arguments. # For example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini #", "Upper command levels. path: Path to last valid command. children_names: Names of valid", "\"\"\" log.debug('loading module %s from %s', name, loader) module = loader.find_module(name).load_module(name) try: command_type", "parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it a syntax error? if", "is likely a syntax error, or a non-existent command, e.g. orchestrate images crate", "does not contain a subclass of OrchestateCommand. 
\"\"\" log.debug('loading module %s from %s',", "base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message = ( 'Could not find implementation", "log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments. If", "valid submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible", "and # limitations under the License. \"\"\"Executes main command-line entry-point. \"\"\" import inspect", "the License. # You may obtain a copy of the License at #", "os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:], } command =", "into options and arguments. Args: command: OrchestrateCommand instance. name: Module name. parents: Command", "a syntax error? if command not in children_names[parent_path]: log.error('Invalid choice: %s', command) log.info('Maybe", "module loader. Args: name: Module name. loader: Module loader. Raises: ModuleLoaderError: If module", "loader, arguments): \"\"\"Executes the given command. Args: name: Command name, e.g. create. parents:", "arguments) def find_valid_commands(path): \"\"\"Returns list of valid commands in the given path. 
A", "command parents.append(command) path = os.path.sep.join([path, command]) can_continue = True break else: execute_command(command, parents,", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "'Could not find implementation of OrchestrateCommand {name} in module' ' {module}' ).format( name=name,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "{description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults())", "= [] for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path =", "= \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser", "else: log.error('Invalid choice: %s', command) full_command = ' '.join(parents[:-1]) log.info('Available commands for %s:',", "why a given module could not be loaded. \"\"\" pass def create_command(name, loader):", "level searching for a leaf command parents.append(command) path = os.path.sep.join([path, command]) can_continue =", "given path. A valid command is either a module, or a package that", "directory: parents[:], } command = '' # Iterate arguments trying to find a", "instance from the submodule and execute it with the remaining arguments. 
# For", "--packages=maya,nuke,houdini for index, command in enumerate(arguments): command = command.replace('-', '_') children_names[path] = []", "\\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message = (", "' '.join(parents) else: log.error('Invalid choice: %s', command) full_command = ' '.join(parents[:-1]) log.info('Available commands", "command) User could have typed a non-leaf command, e.g.: orchestrate images (instead of", "Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0", "language governing permissions and # limitations under the License. \"\"\"Executes main command-line entry-point.", "of parent commands, e.g. ['orchestrate', 'images'] loader: Object that can load the module", "\"\"\" commands = [] for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else:", "import pkgutil import sys from orchestrate import base from orchestrate import utils #", "relevant to the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s', dict( parents=' '.join(parents), command=name,", "For example: # orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would walk looking for", "order to configure loggers. # pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class", "os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if it", "arguments=arguments, )) command = create_command(name, loader) options, arguments = parse_arguments(command, name, parents, arguments)", "can_continue = False for loader, name, is_package in pkgutil.walk_packages([path]): # Save reference to", "matching command. module_path = os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) # Execute command", "parents, loader, arguments): \"\"\"Executes the given command. 
Args: name: Command name, e.g. create.", "parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments = parser.parse_args(arguments) return options, arguments def execute_command(name, parents, loader,", "getattr(module, 'Command') if not inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except", "of valid commands found at immediate parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands", "if command == name: if is_package: # Matching command that expects a subcommand,", "module could not be loaded. \"\"\" pass def create_command(name, loader): \"\"\"Returns a command", "in the given path. A valid command is either a module, or a", "is_package: # Matching command that expects a subcommand, let's advance to # next", "and execute it with the remaining arguments. # For example: # orchestrate images", "os.path.sep.join([path, command]) can_continue = True break else: execute_command(command, parents, loader, arguments[index+1:]) # nothing", "with the License. # You may obtain a copy of the License at", "python3 # Copyright 2020 Google LLC # # Licensed under the Apache License,", "we try to load a command # instance from the submodule and execute", "sys from orchestrate import base from orchestrate import utils # We need to", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "\"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it a syntax error?", "module = loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if not inspect.isclass(command_type) \\ or", "list of valid commands in the given path. A valid command is either", "--packages=maya,nuke,houdini # Would walk looking for the following commands in this order: #", "command at current level. Don't look further. 
break suggest_recovery_options(command, parents, path, children_names) if", "may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # #", "pkgutil.walk_packages([path]): # Save reference to modules in every level so that we can", "User could have typed a non-leaf command, e.g.: orchestrate images (instead of orchestrate", "argument expected.') full_command = ' '.join(parents) else: log.error('Invalid choice: %s', command) full_command =", "would effectively trim empty command packages. Args: path: Path to package to introspect.", "command. module_path = os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) # Execute command if", "non-existent command, e.g. orchestrate images crate (instead of create) orchestrate foobar create (foobar", "command-line, i.e. sys.argv. \"\"\" if arguments is None: arguments = sys.argv[1:] loaders =", "submodule and execute it with the remaining arguments. # For example: # orchestrate", "law or agreed to in writing, software # distributed under the License is", "Attempted command. parents: Upper command levels. path: Path to last valid command. children_names:", "the License for the specific language governing permissions and # limitations under the", "can provide more # information to user in case we fail to find", "incomplete command if valid_commands: log.info('Command name argument expected.') full_command = ' '.join(parents) else:", "contains at least one package or a module. This would effectively trim empty", "ModuleLoaderError(Exception): \"\"\"Provide details on why a given module could not be loaded. \"\"\"", "import logging import optparse import os import pkgutil import sys from orchestrate import", "governing permissions and # limitations under the License. \"\"\"Executes main command-line entry-point. \"\"\"", "containing the command. arguments: Arguments relevant to the command. 
\"\"\" log.debug('execute %(parents)s %(command)s", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments. If none specified, it uses", "create_command(name, loader): \"\"\"Returns a command instance from the given module loader. Args: name:", "valid_command in valid_commands: log.info(' %s', valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments:", "= command.replace('-', '_') children_names[path] = [] can_continue = False for loader, name, is_package", "at least one package or a module. This would effectively trim empty command", "a leaf command parents.append(command) path = os.path.sep.join([path, command]) can_continue = True break else:", "default provided from the command-line, i.e. sys.argv. \"\"\" if arguments is None: arguments", "import optparse import os import pkgutil import sys from orchestrate import base from", "on why a given module could not be loaded. \"\"\" pass def create_command(name,", "= os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if", "= command_type() return command def parse_arguments(command, name, parents, arguments): \"\"\"Parse command-line and splits", "valid_command.replace('_', '-')) def main(arguments=None): \"\"\"Runs command-line. Args: arguments: Command arguments. If none specified,", "valid command is either a module, or a package that contains at least", "name, parents, arguments): \"\"\"Parse command-line and splits it into options and arguments. Args:", "matching command at current level. Don't look further. 
break suggest_recovery_options(command, parents, path, children_names)", "of OrchestrateCommand {name} in module' ' {module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message)", "path = os.path.sep.join([path, command]) can_continue = True break else: execute_command(command, parents, loader, arguments[index+1:])", "package that contains at least one package or a module. This would effectively", "limitations under the License. \"\"\"Executes main command-line entry-point. \"\"\" import inspect import logging", "one package or a module. This would effectively trim empty command packages. Args:", "if not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]):", "a given module could not be loaded. \"\"\" pass def create_command(name, loader): \"\"\"Returns", "parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands = find_valid_commands(path) # Was it a", "def suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery options when no command is", "in compliance with the License. # You may obtain a copy of the", "A valid command is either a module, or a package that contains at", "def execute_command(name, parents, loader, arguments): \"\"\"Executes the given command. Args: name: Command name,", "suggestions at the level above. if not valid_commands: valid_commands = find_valid_commands(parent_path) for valid_command", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group) parser.add_options(command.options) options, arguments =", "name]) loaders[module_path] = loader children_names[path].append(name) # Execute command if we reach a submodule", "A tuple of options and arguments. \"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage: {parents}", "command in enumerate(arguments): command = command.replace('-', '_') children_names[path] = [] can_continue = False", "= os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = { directory: parents[:], } command", "module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if it contains", "not a command) User could have typed a non-leaf command, e.g.: orchestrate images", "\"\"\"Parse command-line and splits it into options and arguments. Args: command: OrchestrateCommand instance.", "if not can_continue: # No matching command at current level. Don't look further.", "given module could not be loaded. \"\"\" pass def create_command(name, loader): \"\"\"Returns a", "loader: Module loader. Raises: ModuleLoaderError: If module could not be loaded or does", "parents, arguments): \"\"\"Parse command-line and splits it into options and arguments. Args: command:", "class ModuleLoaderError(Exception): \"\"\"Provide details on why a given module could not be loaded.", "valid commands found at immediate parent above. \"\"\" parent_path = os.path.dirname(path) valid_commands =", "the specific language governing permissions and # limitations under the License. 
\"\"\"Executes main", "See the License for the specific language governing permissions and # limitations under", "(instead of create) orchestrate foobar create (foobar in not a command) User could", "the command-line, i.e. sys.argv. \"\"\" if arguments is None: arguments = sys.argv[1:] loaders", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "submodule) if it contains at least one valid submodule commands.append(module_info.name) break return commands", "name, is_package in pkgutil.walk_packages([path]): # Save reference to modules in every level so", "(not submodule) if it contains at least one valid submodule commands.append(module_info.name) break return", "more # information to user in case we fail to find a matching", "or does not contain a subclass of OrchestateCommand. \"\"\" log.debug('loading module %s from", "the given path. A valid command is either a module, or a package", "package or a module. This would effectively trim empty command packages. Args: path:", "if valid_commands: log.info('Command name argument expected.') full_command = ' '.join(parents) else: log.error('Invalid choice:", "{module}' ).format( name=name, module=module.__file__, ) raise ModuleLoaderError(message) command = command_type() return command def", "package to introspect. \"\"\" commands = [] for module_info in pkgutil.walk_packages([path]): if not", "# If no commands at the current level, provide suggestions at the level", "we find a submodule with a matching name, we try to load a", "os import pkgutil import sys from orchestrate import base from orchestrate import utils", "log = logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why a given module could", "Entire command-line arguments. Returns: A tuple of options and arguments. 
\"\"\" log.debug('Parsing arguments')", "module %s from %s', name, loader) module = loader.find_module(name).load_module(name) try: command_type = getattr(module,", "images crate (instead of create) orchestrate foobar create (foobar in not a command)", "%s', command) full_command = ' '.join(parents[:-1]) log.info('Available commands for %s:', full_command) # If", "subclass of OrchestateCommand. \"\"\" log.debug('loading module %s from %s', name, loader) module =", "command: OrchestrateCommand instance. name: Module name. parents: Command hierarchy. arguments: Entire command-line arguments.", "name. loader: Module loader. Raises: ModuleLoaderError: If module could not be loaded or", "log.error('Invalid choice: %s', command) log.info('Maybe you meant:') else: # It was an incomplete", "# test-image-1 --packages=maya,nuke,houdini for index, command in enumerate(arguments): command = command.replace('-', '_') children_names[path]", "choice: %s', command) log.info('Maybe you meant:') else: # It was an incomplete command", "or a non-existent command, e.g. orchestrate images crate (instead of create) orchestrate foobar", "command = command.replace('-', '_') children_names[path] = [] can_continue = False for loader, name,", "command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group", "looking for the following commands in this order: # 1. images # 2.", "leaf command parents.append(command) path = os.path.sep.join([path, command]) can_continue = True break else: execute_command(command,", "create) orchestrate foobar create (foobar in not a command) User could have typed", "loader children_names[path].append(name) # Execute command if we reach a submodule with a matching", "# 2. 
create # When it reaches \"create\", it would load the orchestrate.commands.image.create", "find_valid_commands(path): \"\"\"Returns list of valid commands in the given path. A valid command", "of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "found. There is likely a syntax error, or a non-existent command, e.g. orchestrate", "loader) options, arguments = parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments)", "in order to configure loggers. # pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__)", "When it reaches \"create\", it would load the orchestrate.commands.image.create # module and will", "submodule with a matching name if command == name: if is_package: # Matching", "every level so that we can provide more # information to user in", "= logging.getLogger(__name__) class ModuleLoaderError(Exception): \"\"\"Provide details on why a given module could not", "command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of valid commands in the given path.", "load a command # instance from the submodule and execute it with the", "can_continue = True break else: execute_command(command, parents, loader, arguments[index+1:]) # nothing further to", "Args: command: OrchestrateCommand instance. name: Module name. parents: Command hierarchy. arguments: Entire command-line", "module containing the command. arguments: Arguments relevant to the command. 
\"\"\" log.debug('execute %(parents)s", "command that expects a subcommand, let's advance to # next level searching for", "options, arguments = parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def", "children_names[path] = [] can_continue = False for loader, name, is_package in pkgutil.walk_packages([path]): #", "False for loader, name, is_package in pkgutil.walk_packages([path]): # Save reference to modules in", "the default provided from the command-line, i.e. sys.argv. \"\"\" if arguments is None:", "create) Let's provide user with information about recovery options and possible subcommands at", "a submodule with a matching name, we try to load a command #", "(instead of orchestrate images create) Let's provide user with information about recovery options", "children_names): \"\"\"Suggest sensible recovery options when no command is found. There is likely", "Version 2.0 (the \"License\"); # you may not use this file except in", "subcommands at the deepest level we managed to get. Args: command: Attempted command.", "except in compliance with the License. # You may obtain a copy of", "that contains at least one package or a module. This would effectively trim", "an incomplete command if valid_commands: log.info('Command name argument expected.') full_command = ' '.join(parents)", "# We need to import this module in order to configure loggers. #", "provide user with information about recovery options and possible subcommands at the deepest", "inspect import logging import optparse import os import pkgutil import sys from orchestrate", "# Would walk looking for the following commands in this order: # 1.", "log.debug('Parsing arguments') usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name,", "path, children_names): \"\"\"Suggest sensible recovery options when no command is found. 
There is", "Command arguments. If none specified, it uses the default provided from the command-line,", "by name. # If we find a submodule with a matching name, we", "Add module (not submodule) if it contains at least one valid submodule commands.append(module_info.name)", "Object that can load the module containing the command. arguments: Arguments relevant to", "\"\"\"Runs command-line. Args: arguments: Command arguments. If none specified, it uses the default", "was an incomplete command if valid_commands: log.info('Command name argument expected.') full_command = '", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "non-leaf command, e.g.: orchestrate images (instead of orchestrate images create) Let's provide user", "images create test-image-1 --packages=maya,nuke,houdini # Would walk looking for the following commands in", "matching name if command == name: if is_package: # Matching command that expects", "Matching command that expects a subcommand, let's advance to # next level searching", "_ in pkgutil.walk_packages([submodule_path]): # Add module (not submodule) if it contains at least", "raise ModuleLoaderError(message) command = command_type() return command def parse_arguments(command, name, parents, arguments): \"\"\"Parse", "the command. arguments: Arguments relevant to the command. \"\"\" log.debug('execute %(parents)s %(command)s %(arguments)s',", "to get. Args: command: Attempted command. parents: Upper command levels. path: Path to", "level. Don't look further. break suggest_recovery_options(command, parents, path, children_names) if __name__ == '__main__':", "inspect.isclass(command_type) \\ or not issubclass(command_type, base.OrchestrateCommand): raise TypeError() except (AttributeError, TypeError): message =", "name: Module name. loader: Module loader. 
Raises: ModuleLoaderError: If module could not be", "a command instance from the given module loader. Args: name: Module name. loader:", "# 1. images # 2. create # When it reaches \"create\", it would", "os.path.sep.join([path, name]) loaders[module_path] = loader children_names[path].append(name) # Execute command if we reach a", "name, loader) module = loader.find_module(name).load_module(name) try: command_type = getattr(module, 'Command') if not inspect.isclass(command_type)", "provided from the command-line, i.e. sys.argv. \"\"\" if arguments is None: arguments =", "loaders = dict() parents = ['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands']))", "commands for %s:', full_command) # If no commands at the current level, provide", "usage = \"\"\"Usage: {parents} {command} [OPTIONS] [ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, )", "a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "defaults = dict() defaults.update(utils.get_common_option_defaults()) defaults.update(command.defaults) parser.set_defaults(**defaults) common_options_group = optparse.OptionGroup(parser, 'Global options') common_options_group.add_options(utils.get_common_options()) parser.add_option_group(common_options_group)", "Names of valid commands found at immediate parent above. \"\"\" parent_path = os.path.dirname(path)", "name: Command name, e.g. create. parents: Names of parent commands, e.g. ['orchestrate', 'images']", "command is either a module, or a package that contains at least one", "Module loader. 
Raises: ModuleLoaderError: If module could not be loaded or does not", "for module_info in pkgutil.walk_packages([path]): if not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name])", "not module_info.ispkg: commands.append(module_info.name) else: submodule_path = os.path.sep.join([path, module_info.name]) for _ in pkgutil.walk_packages([submodule_path]): #", "If no commands at the current level, provide suggestions at the level above.", "Returns: A tuple of options and arguments. \"\"\" log.debug('Parsing arguments') usage = \"\"\"Usage:", "fail to find a matching command. module_path = os.path.sep.join([path, name]) loaders[module_path] = loader", "orchestrate images create test-image-1 --packages=maya,nuke,houdini # Would walk looking for the following commands", "if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns list of valid commands in", "== name: if is_package: # Matching command that expects a subcommand, let's advance", "command by name. # If we find a submodule with a matching name,", "with a matching name if command == name: if is_package: # Matching command", "full_command) # If no commands at the current level, provide suggestions at the", "case we fail to find a matching command. 
module_path = os.path.sep.join([path, name]) loaders[module_path]", "foobar create (foobar in not a command) User could have typed a non-leaf", "'.join(parents[:-1]) log.info('Available commands for %s:', full_command) # If no commands at the current", "} command = '' # Iterate arguments trying to find a matching command", "= parse_arguments(command, name, parents, arguments) if options.verbose: logging.getLogger().setLevel(logging.DEBUG) command.run(options, arguments) def find_valid_commands(path): \"\"\"Returns", "TypeError() except (AttributeError, TypeError): message = ( 'Could not find implementation of OrchestrateCommand", "\"\"\"Returns a command instance from the given module loader. Args: name: Module name.", "at the deepest level we managed to get. Args: command: Attempted command. parents:", "1. images # 2. create # When it reaches \"create\", it would load", "would load the orchestrate.commands.image.create # module and will attempt to run Command.run() with", "name, e.g. create. parents: Names of parent commands, e.g. ['orchestrate', 'images'] loader: Object", "it contains at least one valid submodule commands.append(module_info.name) break return commands def suggest_recovery_options(command,", "arguments is None: arguments = sys.argv[1:] loaders = dict() parents = ['orchestrate'] directory", "True break else: execute_command(command, parents, loader, arguments[index+1:]) # nothing further to do return", "hierarchy. arguments: Entire command-line arguments. Returns: A tuple of options and arguments. \"\"\"", "of orchestrate images create) Let's provide user with information about recovery options and", "entry-point. \"\"\" import inspect import logging import optparse import os import pkgutil import", "pass def create_command(name, loader): \"\"\"Returns a command instance from the given module loader.", "and splits it into options and arguments. Args: command: OrchestrateCommand instance. name: Module", "instance. name: Module name. 
parents: Command hierarchy. arguments: Entire command-line arguments. Returns: A", "[ARGUMENTS] {description}\"\"\".format( parents=' '.join(parents), command=name, description=command.description, ) parser = optparse.OptionParser(usage=usage) defaults = dict()", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "Path to last valid command. children_names: Names of valid commands found at immediate", "valid command. children_names: Names of valid commands found at immediate parent above. \"\"\"", "for %s:', full_command) # If no commands at the current level, provide suggestions", "'commands'])) children_names = { directory: parents[:], } command = '' # Iterate arguments", "that we can provide more # information to user in case we fail", "suggest_recovery_options(command, parents, path, children_names): \"\"\"Suggest sensible recovery options when no command is found.", "parents = ['orchestrate'] directory = os.path.dirname(__file__) path = os.path.abspath(os.path.sep.join([directory, 'commands'])) children_names = {", "e.g. ['orchestrate', 'images'] loader: Object that can load the module containing the command.", "OrchestateCommand. \"\"\" log.debug('loading module %s from %s', name, loader) module = loader.find_module(name).load_module(name) try:", "name argument expected.') full_command = ' '.join(parents) else: log.error('Invalid choice: %s', command) full_command", "can load the module containing the command. arguments: Arguments relevant to the command.", "create (foobar in not a command) User could have typed a non-leaf command,", "to configure loggers. # pylint: disable=unused-import import orchestrate.logger log = logging.getLogger(__name__) class ModuleLoaderError(Exception):", "this order: # 1. images # 2. create # When it reaches \"create\"," ]
[ "\"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu() li=[] for", "= text # self.salary = salary # Employee.empCount += 1 # \"\"\" 你的", "print('kaishi') # self.text = text # self.salary = salary # Employee.empCount += 1", "百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text = text # self.salary =", "keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu() li=[] for i in mb.search(query=keyword,start=start):", "def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu() li=[] for i", "+= 1 # \"\"\" 你的 APPID AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果", "\"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text = text # self.salary", "# Employee.empCount += 1 # \"\"\" 你的 APPID AK SK \"\"\" def get(self,", "MagicBaidu import pprint class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') #", "MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) # print(i) li.append(i) #", "\"\"\" def __init__(self): print('kaishi') # self.text = text # self.salary = salary #", "<reponame>napoler/Terry-toolkit<filename>Terry_toolkit/SearchBaidu.py from MagicBaidu import MagicBaidu import pprint class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\"", "SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu() li=[]", "pprint class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text =", "APPID AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb =", "MagicBaidu import MagicBaidu import pprint class SearchBaidu: 
\"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self):", "= salary # Employee.empCount += 1 # \"\"\" 你的 APPID AK SK \"\"\"", "from MagicBaidu import MagicBaidu import pprint class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def", "使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text = text # self.salary = salary", "get(keyword) \"\"\" mb = MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url'])", "# \"\"\" 你的 APPID AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword)", "# self.salary = salary # Employee.empCount += 1 # \"\"\" 你的 APPID AK", "self.text = text # self.salary = salary # Employee.empCount += 1 # \"\"\"", ">>> get(keyword) \"\"\" mb = MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url']))", "# self.text = text # self.salary = salary # Employee.empCount += 1 #", "for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) # print(i) li.append(i) # pprint.pprint(li) return", "text # self.salary = salary # Employee.empCount += 1 # \"\"\" 你的 APPID", "AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu()", "li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) # print(i) li.append(i) # pprint.pprint(li)", "\"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): #", "你的 APPID AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb", "import pprint class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text", "salary # Employee.empCount += 1 # \"\"\" 你的 APPID 
AK SK \"\"\" def", "get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\" mb = MagicBaidu() li=[] for i in", "\"\"\" 你的 APPID AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>> get(keyword) \"\"\"", "mb = MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) # print(i)", "SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text = text #", "\"\"\" mb = MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) #", "__init__(self): print('kaishi') # self.text = text # self.salary = salary # Employee.empCount +=", "= MagicBaidu() li=[] for i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) # print(i) li.append(i)", "i in mb.search(query=keyword,start=start): # print(mb.get_real_url(i['url'])) i['url']=mb.get_real_url(i['url']) # print(i) li.append(i) # pprint.pprint(li) return li", "import MagicBaidu import pprint class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi')", "class SearchBaidu: \"\"\"SearchBaidu 百度搜索结果抓取 使用https://github.com/napoler/MagicBaidu \"\"\" def __init__(self): print('kaishi') # self.text = text", "Employee.empCount += 1 # \"\"\" 你的 APPID AK SK \"\"\" def get(self, keyword,start=0):", "1 # \"\"\" 你的 APPID AK SK \"\"\" def get(self, keyword,start=0): \"\"\"获取百度搜索结果 >>>", "def __init__(self): print('kaishi') # self.text = text # self.salary = salary # Employee.empCount", "self.salary = salary # Employee.empCount += 1 # \"\"\" 你的 APPID AK SK" ]
[ "Camera, CameraControl, getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui", "w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox)", "import glob import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def", "self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open", "cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None]", "os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent)", "= False return True # Try to open camera try: if not cam.open():", "index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def closeEvent(self, e): self.options.geometry =", "self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\" % cam.name) self.camControl", "PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget,", "if not cam.open(): raise Exception() self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self,", "= [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False self.camControl = None self.camLabel", "\"<br>\" \"This software is licensed under WTFPL. 
See COPYING file for details.<br>\" )", "= os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text,", "MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox =", "= [\"None\"] + [cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts", "\"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self):", "import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit", "self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return", "not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image '%s'.\" % path)", "if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright (C)", "self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\",", ") if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn", "msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching: return False self.switching = True", "image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image '%s'.\" % path) def", "self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int) def cameraChangedSlot(self, index): if index >=", "parent) if 
shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options =", "fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu =", "from .optionswin import OptionsWin from .camera import Camera, CameraControl, getCameraDevices from .camlabel import", "action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def", "'%s'.\" % cam.name) self.camControl = None return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True)", "\"Image File (*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg =", "hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry)", "saveImage(self, path): image = self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\" % path)", "self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\",", "def on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\"", "self.addAction(action) self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab() super().close() def 
on_snapAction(self): self.snapAction.setEnabled(False) image =", "def cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def", "% path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self,", "software is licensed under WTFPL. See COPYING file for details.<br>\" ) QMessageBox.about( self,", ") QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam):", "setCamera(self, cam): if self.switching: return False self.switching = True self.snapAction.setEnabled(False) # Stop the", "self.camControl = None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def", "href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under WTFPL. 
See COPYING file for details.<br>\"", "return True @pyqtSlot(int) def cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def", "@pyqtSlot(int) def cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras()", "image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save", "self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable", "True @pyqtSlot(int) def cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self):", "% QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching: return False self.switching", "self.camControl: self.camControl.stopGrab() self.camControl = None # Show blank image if we don't have", "*.bmp)\" ) if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\"", "False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self,", "self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\",", "QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools import logging import os import", "successfully self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int) def cameraChangedSlot(self, index): if index", "self.menuBar() fileMenu = 
self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction", "return True # Try to open camera try: if not cam.open(): raise Exception()", "camera if self.camControl: self.camControl.stopGrab() self.camControl = None # Show blank image if we", "Stop the current camera if self.camControl: self.camControl.stopGrab() self.camControl = None # Show blank", "self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts for action in self.camActions: self.removeAction(action)", "QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools import logging import", "glob import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self,", "from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout,", "path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't", "self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File (*.png", "hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None)", "getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage,", "self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", 
self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction", "[] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False self.camControl = None self.camLabel =", "= CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera", "self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): #", "Try to open camera try: if not cam.open(): raise Exception() self.camControl = CameraControl(self,", "import os import glob import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class", "import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent, text, action,", "None return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching = False return True", "self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def", "self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def", "self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\" %", "self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") 
helpMenu.addAction(Action(self, \"About\",", "items = [\"None\"] + [cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add", "return False self.switching = True self.snapAction.setEnabled(False) # Stop the current camera if self.camControl:", "from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton,", "Show blank image if we don't have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path,", "if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def closeEvent(self, e): self.options.geometry", "cam.name) self.camControl = None return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching =", "% path) def on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName(", "WTFPL. 
See COPYING file for details.<br>\" ) QMessageBox.about( self, \"About %s\" % QApplication.applicationName(),", "class Action(QAction): def __init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut:", "= OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras()", "import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent,", "path): image = self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\" % path) image", "0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def closeEvent(self, e): self.options.geometry = self.saveGeometry() self.options.save()", "file for details.<br>\" ) QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) )", "= Action(self, \"\", f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def close(self): if self.camControl:", "self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts for", "# Clear old shortcuts for action in self.camActions: self.removeAction(action) # Read camera files", "return logging.debug(\"saving '%s'\" % path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not", "horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save", "QLineEdit() self.switching = False self.camControl = None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self)", "\"camera.png\"))) 
self.switching = False return True # Try to open camera try: if", "QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools import logging import os", "% cam.name) self.camControl = None return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching", "time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent, text, action, shortcut=None,", "= False self.camControl = None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu()", "if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def", "= self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction()", "not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self,", "current camera if self.camControl: self.camControl.stopGrab() self.camControl = None # Show blank image if", "Camera '%s'.\" % cam.name) self.camControl = None return self.setCamera(None) # Camera opened successfully", "= False return True @pyqtSlot(int) def cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index])", "hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\",", "= Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", 
False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\"))", "True self.snapAction.setEnabled(False) # Stop the current camera if self.camControl: self.camControl.stopGrab() self.camControl = None", "the current camera if self.camControl: self.camControl.stopGrab() self.camControl = None # Show blank image", "vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts", "self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if", "don't have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False", "\"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self)))", "cameras for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\",", "OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit", "image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath,", "image = self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\" % path) image =", "from .camlabel import CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage, QIcon", "= QVBoxLayout() w.setLayout(vbox) hbox = 
QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda:", "return fn def saveImage(self, path): image = self.imgLabel.image() if not image: return logging.debug(\"saving", "self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\",", "= [None] + cameras # Update our camera combo box items = [\"None\"]", "self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu", "= True self.snapAction.setEnabled(False) # Stop the current camera if self.camControl: self.camControl.stopGrab() self.camControl =", "in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None] + cameras # Update our", "+ cameras # Update our camera combo box items = [\"None\"] + [cam.name", "if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) #", "box items = [\"None\"] + [cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) #", "__init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled)", "CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu", "if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", 
\"camera.png\"))) self.switching = False return True #", "QLabel, QPushButton, QComboBox, QLineEdit import functools import logging import os import glob import", "to open camera try: if not cam.open(): raise Exception() self.camControl = CameraControl(self, self.camLabel,", "os import glob import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction):", "= image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\",", "our camera combo box items = [\"None\"] + [cam.name for cam in cameras]", "\"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox", "self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox =", "QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools import logging", "self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old", "self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction)", "f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f, \"Ctrl+%d\" % i) self.addAction(action)", "cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def closeEvent(self,", "f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def close(self): 
if self.camControl: self.camControl.stopGrab() super().close() def", "False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\",", "cameras.sort(key=lambda cam: cam.name) self.cameras = [None] + cameras # Update our camera combo", "\"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under WTFPL.", "def setCamera(self, cam): if self.switching: return False self.switching = True self.snapAction.setEnabled(False) # Stop", "\"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self,", "not image: return logging.debug(\"saving '%s'\" % path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert )", "fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None] + cameras # Update", "None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path,", "\"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction =", "= CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self):", "def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg", "# Add shortcuts for cameras for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i)", "import functools import logging import os import glob import sys import time _data_path", "def 
on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save", "cam.name) self.cameras = [None] + cameras # Update our camera combo box items", "if self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not", "'%s'.\" % path) def on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path =", "self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False return True # Try to open camera", "= QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False self.camControl", "\"data\") class Action(QAction): def __init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent) if", "path) def on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self,", "QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout()", "self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False self.camControl = None self.camLabel = CamLabel(self)", "open camera try: if not cam.open(): raise Exception() self.camControl = CameraControl(self, self.camLabel, cam)", "COPYING file for details.<br>\" ) QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion())", "Image\", \"Couldn't Save Image '%s'.\" % path) def on_saveAction(self): path = self.savePath() self.saveImage(path)", "Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\",", "QLineEdit import functools import logging import os import 
glob import sys import time", "Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox", "shortcuts for cameras for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action =", "os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self, path): image", "from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import QApplication,", "QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools", "self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w =", "= os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\"", "on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\"", "i) self.addAction(action) self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image", "False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show))", "glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None] + cameras # Update our camera", "def close(self): if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image)", "not cam.open(): raise Exception() self.camControl = 
CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't", "pyqtSlot from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction,", ".camera import Camera, CameraControl, getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore import pyqtSlot", "QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False self.camControl =", "toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self,", "# Stop the current camera if self.camControl: self.camControl.stopGrab() self.camControl = None # Show", "import pyqtSlot from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow,", "def __init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox()", "= ( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software", "except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\" % cam.name) self.camControl =", "QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path,", "cam: cam.name) self.cameras = [None] + cameras # Update our camera combo box", "if not image: return logging.debug(\"saving '%s'\" % 
path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert", "Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu", "self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar()", "on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\"", "Action(QAction): def __init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut)", "self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu =", "self.camControl.stopGrab() self.camControl = None # Show blank image if we don't have camera", "Save Image '%s'.\" % path) def on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self):", "action in self.camActions: self.removeAction(action) # Read camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras", "from .app import data_path from .optionswin import OptionsWin from .camera import Camera, CameraControl,", "Image '%s'.\" % path) def on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path", "[Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None] + cameras", "*.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{} v{}<br>\"", "self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) 
toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self,", "\"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\",", "if self.switching: return False self.switching = True self.snapAction.setEnabled(False) # Stop the current camera", "camera try: if not cam.open(): raise Exception() self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab()", "Camera opened successfully self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int) def cameraChangedSlot(self, index):", "msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This", "QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if", "__init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox() self.camActions", "= self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w", "QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus())", "createLayout(self): w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) 
self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot)", ">= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def closeEvent(self, e): self.options.geometry = self.saveGeometry()", "have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False return", "\"Couldn't Save Image '%s'.\" % path) def on_saveAction(self): path = self.savePath() self.saveImage(path) def", "Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def on_aboutAction(self):", "See COPYING file for details.<br>\" ) QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(),", "licensed under WTFPL. See COPYING file for details.<br>\" ) QMessageBox.about( self, \"About %s\"", "Save Image\", \"Couldn't Save Image '%s'.\" % path) def on_saveAction(self): path = self.savePath()", "def createLayout(self): w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\"))", "range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f, \"Ctrl+%d\" % i)", "self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path):", "= None # Show blank image if we don't have camera if cam", "Action(self, \"\", f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab()", "details.<br>\" ) QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self,", "for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f,", 
"True # Try to open camera try: if not cam.open(): raise Exception() self.camControl", "self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(),", "self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\",", "self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File", "= self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction)", "in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f, \"Ctrl+%d\" %", "\"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts for action", "raise Exception() self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\",", "if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self)", "self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if", "toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda:", "QApplication.applicationName(), 
msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching: return False self.switching =", "\"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching: return", "self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras =", "we don't have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching =", "logging.debug(\"saving '%s'\" % path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path,", "for action in self.camActions: self.removeAction(action) # Read camera files path = os.path.join(self.options.cfgPath, \"*.json\")", "self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit =", "hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def", "cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for cameras for i in", "fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self, path): image =", "self.switching = False return True @pyqtSlot(int) def cameraChangedSlot(self, index): if index >= 0:", "os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)] 
cameras.sort(key=lambda cam: cam.name) self.cameras", "self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction,", "self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image", "%s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching: return False", "QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel,", "savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn =", "\"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self, path): image = self.imgLabel.image() if not", "== None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False return True # Try to", "old shortcuts for action in self.camActions: self.removeAction(action) # Read camera files path =", "\"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\")", "(C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under WTFPL. 
See COPYING", "\"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self,", "self.removeAction(action) # Read camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for", "Update our camera combo box items = [\"None\"] + [cam.name for cam in", "\"*.json\") cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras =", "def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn", "self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction", "= Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\",", "fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda:", ") if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright", "self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000))", "on_saveAction(self): path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\",", "fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu", 
"[\"None\"] + [cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for", "path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path,", "shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self):", "v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under", "[] self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching =", "os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" %", "As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\")", "self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False)", "Open Camera\", \"Couldn't Open Camera '%s'.\" % cam.name) self.camControl = None return self.setCamera(None)", "on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True)", "QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image '%s'.\" % path) def on_saveAction(self): path", "quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image '%s'.\" % path) def on_saveAction(self):", "= functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f, 
\"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action)", "+ [cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for cameras", "\"Couldn't Open Camera '%s'.\" % cam.name) self.camControl = None return self.setCamera(None) # Camera", "saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip()", "self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path", "= CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\")))", "\"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction)", "image if we don't have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\")))", "os.path.exists(path): os.mkdir(path) fn = os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self, path):", "data_path from .optionswin import OptionsWin from .camera import Camera, CameraControl, getCameraDevices from .camlabel", "Open Camera '%s'.\" % cam.name) self.camControl = None return self.setCamera(None) # Camera opened", "self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\" % path) image = image.mirrored( horizontal=self.options.flipHoriz,", "import QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox,", "hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) 
hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w)", "cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for cameras for i in range(len(self.cameras)): f", "logging import os import glob import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\")", "Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False)", "loadCameras(self): # Clear old shortcuts for action in self.camActions: self.removeAction(action) # Read camera", "vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear", "helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox = QVBoxLayout()", "return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int)", "for cameras for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self,", "cameras # Update our camera combo box items = [\"None\"] + [cam.name for", "CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import", "cam.open(): raise Exception() self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open", "self.options.show)) helpMenu = 
self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def", "\"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under WTFPL. See", "self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False self.camControl = None", "path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" )", "fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save", "def __init__(self, parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action)", "combo box items = [\"None\"] + [cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items)", "self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self):", "= Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction,", "\"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction = Action(self, \"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False)", "if self.camControl: self.camControl.stopGrab() self.camControl = None # Show blank image if we don't", "fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction =", "image: return logging.debug(\"saving '%s'\" % path) image = image.mirrored( 
horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if", "False return True # Try to open camera try: if not cam.open(): raise", "from .camera import Camera, CameraControl, getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore import", "\"Save As...\", self.on_saveAsAction, \"Ctrl+Shift+S\", False) fileMenu.addAction(self.saveAsAction) fileMenu.addAction(Action(self, \"Quit\", lambda: self.close(), \"Ctrl+Q\")) toolsMenu =", "if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image '%s'.\" %", "self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for cameras for i in range(len(self.cameras)): f =", "(*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{}", "w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox)", "files path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda", "False return True @pyqtSlot(int) def cameraChangedSlot(self, index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot()", "# Try to open camera try: if not cam.open(): raise Exception() self.camControl =", "[cam.name for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for cameras for", "text, action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow):", "# Read camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn", "import logging import os import glob import sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),", "2018 <a 
href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under WTFPL. See COPYING file", "cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\" % cam.name)", "_data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent, text, action, shortcut=None, enabled=True):", "path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018", "Camera\", \"Couldn't Open Camera '%s'.\" % cam.name) self.camControl = None return self.setCamera(None) #", "i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f, \"Ctrl+%d\"", "QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching: return False self.switching = True self.snapAction.setEnabled(False)", "Add shortcuts for cameras for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex, i) action", "= self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image", "# Show blank image if we don't have camera if cam == None:", "% i) self.addAction(action) self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False)", "in self.camActions: self.removeAction(action) # Read camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras =", "self.switching = False return True # Try to open camera try: if not", "in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add shortcuts for cameras for i in range(len(self.cameras)):", "class MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self) 
self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox", ") def setCamera(self, cam): if self.switching: return False self.switching = True self.snapAction.setEnabled(False) #", "super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving", "QComboBox, QLineEdit import functools import logging import os import glob import sys import", "try: if not cam.open(): raise Exception() self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except:", "self.cameras = [] self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit()", "functools.partial(self.cameraComboBox.setCurrentIndex, i) action = Action(self, \"\", f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def", "vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit)", "self.switching = False self.camControl = None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow()", "= os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name)", "# Camera opened successfully self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int) def cameraChangedSlot(self,", "sys import time _data_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"data\") class Action(QAction): def __init__(self, parent, text,", "= self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, 
\"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About", "QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools import logging import os import glob", "Read camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn in", "self.camControl = None return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching = False", "= QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel)", "image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't", "[None] + cameras # Update our camera combo box items = [\"None\"] +", "self.camControl = None # Show blank image if we don't have camera if", "createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\",", "= QLineEdit() self.switching = False self.camControl = None self.camLabel = CamLabel(self) self.imgLabel =", "path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda cam:", "= None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self):", "if we don't have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching", "action = Action(self, \"\", f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) 
def close(self): if", "createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\")", "super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options", "hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel)", "self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox() self.camActions = []", "\"Couldn't Save Image\", \"Couldn't Save Image '%s'.\" % path) def on_saveAction(self): path =", "Exception() self.camControl = CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't", "QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import", "\"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def", "blank image if we don't have camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\",", "= os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self, path): image = self.imgLabel.image()", "= [Camera.fromJSON(fn) for fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None] +", "\"img\", \"camera.png\"))) self.switching = False return True # Try to open camera try:", "None: 
self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False return True # Try to open", "OptionsWin from .camera import Camera, CameraControl, getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore", "parent, text, action, shortcut=None, enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class", "= self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\" % path) image = image.mirrored(", "import data_path from .optionswin import OptionsWin from .camera import Camera, CameraControl, getCameraDevices from", "shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot)", "camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn) for fn in glob.glob(path)]", "self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() )", "self.cameras = [None] + cameras # Update our camera combo box items =", "QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" ) if path[0]:", "enabled=True): super().__init__(text, parent) if shortcut: self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__()", "helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self):", "for cam in cameras] self.cameraComboBox.clear() self.cameraComboBox.addItems(items) # Add 
shortcuts for cameras for i", ".app import data_path from .optionswin import OptionsWin from .camera import Camera, CameraControl, getCameraDevices", "= self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction, \"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self,", "fn def saveImage(self, path): image = self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\"", "self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot()", "<a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed under WTFPL. See COPYING file for", "import OptionsWin from .camera import Camera, CameraControl, getCameraDevices from .camlabel import CamLabel from", "Clear old shortcuts for action in self.camActions: self.removeAction(action) # Read camera files path", "self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def setCamera(self, cam): if self.switching:", "None # Show blank image if we don't have camera if cam ==", "def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True)", "PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout,", "def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show() def createMenu(self): self.mainMenu = self.menuBar() fileMenu =", "self.camActions: self.removeAction(action) # Read camera files path = os.path.join(self.options.cfgPath, \"*.json\") cameras = [Camera.fromJSON(fn)", "camera combo box items = [\"None\"] + [cam.name for cam in cameras] 
self.cameraComboBox.clear()", "def saveImage(self, path): image = self.imgLabel.image() if not image: return logging.debug(\"saving '%s'\" %", "self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\")) hbox.addWidget(self.folderLineEdit) self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\")))", "Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path = os.path.join(", "vertical=self.options.flipVert ) if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image", "self.switching: return False self.switching = True self.snapAction.setEnabled(False) # Stop the current camera if", "self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching = False", "cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False return True # Try", "\"This software is licensed under WTFPL. 
See COPYING file for details.<br>\" ) QMessageBox.about(", "CameraControl, getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import", "QPushButton, QComboBox, QLineEdit import functools import logging import os import glob import sys", "def createMenu(self): self.mainMenu = self.menuBar() fileMenu = self.mainMenu.addMenu(\"File\") self.snapAction = Action(self, \"Snap\", self.on_snapAction,", "= QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" ) if", "import CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage, QIcon from PyQt5.QtWidgets", "self.on_saveAction() def savePath(self): path = os.path.join( self.options.outputPath, self.folderLineEdit.text().strip() ) if not os.path.exists(path): os.mkdir(path)", "\"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts for action in", "\"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This software is licensed", "self.folderLineEdit.returnPressed.connect(lambda: self.folderLineEdit.clearFocus()) vbox.addLayout(hbox) self.setCamera(None) vbox.addWidget(self.camLabel) self.imgLabel.setImage(QImage(os.path.join(data_path, \"img\", \"images.png\"))) vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self):", "CameraControl(self, self.camLabel, cam) self.camControl.startGrab() except: QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\"", "for details.<br>\" ) QMessageBox.about( self, \"About %s\" % QApplication.applicationName(), msg.format(QApplication.applicationName(), QApplication.applicationVersion()) ) def", "False self.camControl = None self.camLabel = CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() 
self.createLayout()", "self, \"Save Image\", self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0])", "= None return self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching = False return", "vbox.addWidget(self.imgLabel) self.setCentralWidget(w) self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts for action in self.camActions:", "opened successfully self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int) def cameraChangedSlot(self, index): if", "shortcuts for action in self.camActions: self.removeAction(action) # Read camera files path = os.path.join(self.options.cfgPath,", "self.saveImage(path[0]) def on_aboutAction(self): msg = ( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a", "close(self): if self.camControl: self.camControl.stopGrab() super().close() def on_snapAction(self): self.snapAction.setEnabled(False) image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True)", "PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox,", "helpMenu.addAction(Action(self, \"About\", self.on_aboutAction)) helpMenu.addAction(Action(self, \"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget()", ".camlabel import CamLabel from PyQt5.QtCore import pyqtSlot from PyQt5.QtGui import QImage, QIcon from", "os.path.join(path, \"%d.jpg\" % int(time.time()*1000)) return fn def saveImage(self, path): image = self.imgLabel.image() if", "\"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") helpMenu.addAction(Action(self, \"About\", self.on_aboutAction))", "( \"{} v{}<br>\" \"<br>\" \"Copyright (C) 2018 <a href=\\\"mailto:<EMAIL>\\\"><NAME></a><br>\" \"<br>\" \"This 
software is", "\"Space\", False) fileMenu.addAction(self.snapAction) self.saveAction = Action(self, \"Save\", self.on_saveAction, \"Ctrl+S\", False) fileMenu.addAction(self.saveAction) self.saveAsAction =", "self.restoreGeometry(self.options.geometry) def loadCameras(self): # Clear old shortcuts for action in self.camActions: self.removeAction(action) #", "under WTFPL. See COPYING file for details.<br>\" ) QMessageBox.about( self, \"About %s\" %", "QMessageBox.critical(self, \"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\" % cam.name) self.camControl = None", "\"Couldn't Open Camera\", \"Couldn't Open Camera '%s'.\" % cam.name) self.camControl = None return", "CamLabel(self) self.imgLabel = CamLabel(self) self.createWindow() self.createMenu() self.createLayout() def createWindow(self): self.setWindowIcon(QIcon(os.path.join(data_path, \"img\", \"icon.png\"))) self.show()", "# Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave: self.on_saveAction() def savePath(self): path =", ") if not image.save(path, quality=100): QMessageBox.critical(self, \"Couldn't Save Image\", \"Couldn't Save Image '%s'.\"", "= QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) hbox.addWidget(self.cameraComboBox) hbox.addStretch()", "index): if index >= 0: self.setCamera(self.cameras[index]) @pyqtSlot() def saveOptionsSlot(self): self.loadCameras() def closeEvent(self, e):", "image = self.camControl.snapshot() self.imgLabel.setImage(image) self.snapAction.setEnabled(True) # Enable image saving self.saveAction.setEnabled(True) self.saveAsAction.setEnabled(True) if self.options.autoSave:", "lambda: self.close(), \"Ctrl+Q\")) toolsMenu = self.mainMenu.addMenu(\"Tools\") toolsMenu.addAction(Action(self, \"Options\", self.options.show)) helpMenu = self.mainMenu.addMenu(\"Help\") 
helpMenu.addAction(Action(self,", "import Camera, CameraControl, getCameraDevices from .camlabel import CamLabel from PyQt5.QtCore import pyqtSlot from", "QImage, QIcon from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog,", "File (*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def on_aboutAction(self): msg = (", "super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = [] self.cameraComboBox = QComboBox() self.camActions =", "self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras = []", "is licensed under WTFPL. See COPYING file for details.<br>\" ) QMessageBox.about( self, \"About", "self.switching = True self.snapAction.setEnabled(False) # Stop the current camera if self.camControl: self.camControl.stopGrab() self.camControl", "self.snapAction.setEnabled(False) # Stop the current camera if self.camControl: self.camControl.stopGrab() self.camControl = None #", "\"\", f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def close(self): if self.camControl: self.camControl.stopGrab() super().close()", "QMainWindow, QAction, QVBoxLayout, QHBoxLayout, QMessageBox, QFileDialog, QLabel, QPushButton, QComboBox, QLineEdit import functools import", "# Update our camera combo box items = [\"None\"] + [cam.name for cam", "self.setShortcut(shortcut) self.triggered.connect(action) self.setEnabled(enabled) class MainWin(QMainWindow): def __init__(self): super().__init__() self.options = OptionsWin(self) self.options.saveSignal.connect(self.saveOptionsSlot) self.cameras", "% int(time.time()*1000)) return fn def saveImage(self, path): image = self.imgLabel.image() if not image:", "self.options.outputPath, \"Image File (*.png *.jpg *.bmp)\" ) if path[0]: self.saveImage(path[0]) def 
on_aboutAction(self): msg", "functools import logging import os import glob import sys import time _data_path =", "i) action = Action(self, \"\", f, \"Ctrl+%d\" % i) self.addAction(action) self.camActions.append(action) def close(self):", "lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox =", "cam): if self.switching: return False self.switching = True self.snapAction.setEnabled(False) # Stop the current", "False self.switching = True self.snapAction.setEnabled(False) # Stop the current camera if self.camControl: self.camControl.stopGrab()", "path = self.savePath() self.saveImage(path) def on_saveAsAction(self): path = QFileDialog.getSaveFileName( self, \"Save Image\", self.options.outputPath,", "self.folderLineEdit = QLineEdit() self.switching = False self.camControl = None self.camLabel = CamLabel(self) self.imgLabel", "self.cameraComboBox.addItems(items) # Add shortcuts for cameras for i in range(len(self.cameras)): f = functools.partial(self.cameraComboBox.setCurrentIndex,", "\"About Qt\", lambda: QMessageBox.aboutQt(self))) def createLayout(self): w = QWidget() vbox = QVBoxLayout() w.setLayout(vbox)", "= [] self.cameraComboBox = QComboBox() self.camActions = [] self.loadCameras() self.folderLineEdit = QLineEdit() self.switching", "int(time.time()*1000)) return fn def saveImage(self, path): image = self.imgLabel.image() if not image: return", "for fn in glob.glob(path)] cameras.sort(key=lambda cam: cam.name) self.cameras = [None] + cameras #", "'%s'\" % path) image = image.mirrored( horizontal=self.options.flipHoriz, vertical=self.options.flipVert ) if not image.save(path, quality=100):", ".optionswin import OptionsWin from .camera import Camera, CameraControl, getCameraDevices from .camlabel import CamLabel", "QWidget() vbox = QVBoxLayout() w.setLayout(vbox) hbox = QHBoxLayout() hbox.addWidget(QLabel(\"Camera:\")) self.cameraComboBox.currentIndexChanged.connect(self.cameraChangedSlot) 
hbox.addWidget(self.cameraComboBox) hbox.addStretch() hbox.addWidget(QLabel(\"Folder:\"))", "self.setCamera(None) # Camera opened successfully self.snapAction.setEnabled(True) self.switching = False return True @pyqtSlot(int) def", "def loadCameras(self): # Clear old shortcuts for action in self.camActions: self.removeAction(action) # Read", "camera if cam == None: self.camLabel.setImage(QImage(os.path.join(data_path, \"img\", \"camera.png\"))) self.switching = False return True" ]
[]
[ "# Do basic addition with tensors o1 = pa + pb o2 =", "arguments for targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with", "= sess.run(result, feed_dict={ pa: [1., 1.], pb: [0., 1.], pc: [1., 5.] })", "pc) with tf.Session() as sess: # Run the graph through the session feeding", "o1 = pa + pb o2 = pa + pc simple_graph_output = o1", "= tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc):", "targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa", "[2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc): # Do", "as sess: # Run the graph through the session feeding it an arbitrary", "dictionary result = sess.run(result, feed_dict={ pa: [1., 1.], pb: [0., 1.], pc: [1.,", "the IPU cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa =", "graph through the session feeding it an arbitrary dictionary result = sess.run(result, feed_dict={", "tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting the IPU cfg =", "with tensors o1 = pa + pb o2 = pa + pc simple_graph_output", "pb o2 = pa + pc simple_graph_output = o1 + o2 return simple_graph_output", "simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with tf.Session() as sess: #", "cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2],", "IPU cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32,", "from tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior()", "Do basic addition with tensors o1 = pa + pb o2 = pa", "pb, pc): # Do basic addition 
with tensors o1 = pa + pb", "cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\")", "Configure arguments for targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system()", "name=\"c\") def basic_graph(pa, pb, pc): # Do basic addition with tensors o1 =", "with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with tf.Session() as sess: # Run", "simple_graph_output = o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb,", "import numpy as np from tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf from", "tf.disable_v2_behavior() # Configure arguments for targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus =", "[2], name=\"c\") def basic_graph(pa, pb, pc): # Do basic addition with tensors o1", "from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting the IPU cfg", "tensors o1 = pa + pb o2 = pa + pc simple_graph_output =", "addition with tensors o1 = pa + pb o2 = pa + pc", "tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting the IPU", "pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc): # Do basic addition", "IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus", "it an arbitrary dictionary result = sess.run(result, feed_dict={ pa: [1., 1.], pb: [0.,", "pb, pc) with tf.Session() as sess: # Run the graph through the session", "[2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def", "as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting the", "o2 = pa + pc simple_graph_output = o1 + o2 return simple_graph_output with", "the 
graph through the session feeding it an arbitrary dictionary result = sess.run(result,", "an arbitrary dictionary result = sess.run(result, feed_dict={ pa: [1., 1.], pb: [0., 1.],", "pa + pb o2 = pa + pc simple_graph_output = o1 + o2", "sess: # Run the graph through the session feeding it an arbitrary dictionary", "sess.run(result, feed_dict={ pa: [1., 1.], pb: [0., 1.], pc: [1., 5.] }) print(result)", "tf.Session() as sess: # Run the graph through the session feeding it an", "pa = tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32,", "name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc): # Do basic", "arbitrary dictionary result = sess.run(result, feed_dict={ pa: [1., 1.], pb: [0., 1.], pc:", "for targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"):", "import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments for", "= basic_graph(pa, pb, pc) with tf.Session() as sess: # Run the graph through", "tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() #", "tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting", "as np from tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import", "<filename>tensorflow1/basic-graph.py import numpy as np from tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf", "= o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc)", "pa + pc simple_graph_output = o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result", "return simple_graph_output with 
ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with tf.Session() as sess:", "pc simple_graph_output = o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa,", "name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa,", "IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb", "ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with tf.Session() as sess: # Run the", "tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc =", "= tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc): # Do basic addition with", "= tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2],", "o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with tf.Session() as", "feeding it an arbitrary dictionary result = sess.run(result, feed_dict={ pa: [1., 1.], pb:", "pb = tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb,", "o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with", "= 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32,", "tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc): # Do basic addition with tensors", "tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\")", "cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb =", "basic_graph(pa, pb, pc): # Do basic addition with tensors 
o1 = pa +", "import IPUConfig tf.disable_v2_behavior() # Configure arguments for targeting the IPU cfg = IPUConfig()", "def basic_graph(pa, pb, pc): # Do basic addition with tensors o1 = pa", "result = sess.run(result, feed_dict={ pa: [1., 1.], pb: [0., 1.], pc: [1., 5.]", "numpy as np from tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config", "session feeding it an arbitrary dictionary result = sess.run(result, feed_dict={ pa: [1., 1.],", "np from tensorflow.python.ipu.scopes import ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig", "+ o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result = basic_graph(pa, pb, pc) with tf.Session()", "with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2], name=\"b\") pc", "basic addition with tensors o1 = pa + pb o2 = pa +", "Run the graph through the session feeding it an arbitrary dictionary result =", "pc): # Do basic addition with tensors o1 = pa + pb o2", "= IPUConfig() cfg.auto_select_ipus = 1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\")", "with tf.Session() as sess: # Run the graph through the session feeding it", "+ pc simple_graph_output = o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"): result =", "basic_graph(pa, pb, pc) with tf.Session() as sess: # Run the graph through the", "through the session feeding it an arbitrary dictionary result = sess.run(result, feed_dict={ pa:", "= pa + pb o2 = pa + pc simple_graph_output = o1 +", "import ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure", "tf.placeholder(np.float32, [2], name=\"b\") pc = tf.placeholder(np.float32, [2], name=\"c\") def basic_graph(pa, pb, pc): #", "# Run the graph through the session feeding it an arbitrary dictionary 
result", "1 cfg.configure_ipu_system() with tf.device(\"cpu\"): pa = tf.placeholder(np.float32, [2], name=\"a\") pb = tf.placeholder(np.float32, [2],", "ipu_scope import tensorflow.compat.v1 as tf from tensorflow.python.ipu.config import IPUConfig tf.disable_v2_behavior() # Configure arguments", "result = basic_graph(pa, pb, pc) with tf.Session() as sess: # Run the graph", "# Configure arguments for targeting the IPU cfg = IPUConfig() cfg.auto_select_ipus = 1", "+ pb o2 = pa + pc simple_graph_output = o1 + o2 return", "= pa + pc simple_graph_output = o1 + o2 return simple_graph_output with ipu_scope(\"/device:IPU:0\"):", "the session feeding it an arbitrary dictionary result = sess.run(result, feed_dict={ pa: [1.," ]
[ "new_pt(*values): return np.array(values or (0, 0, 0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec))", "'assets', name) def new_pt(*values): return np.array(values or (0, 0, 0), dtype=float) def vec_mag(vec:", "= getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values): return", "self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound() @staticmethod def", "or (0, 0, 0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array,", "'_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values): return np.array(values or", "True timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size / (sound.frequency * sound.bitrate /", "'.') def get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values): return np.array(values or (0,", "assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values):", "name) def new_pt(*values): return np.array(values or (0, 0, 0), dtype=float) def vec_mag(vec: np.array):", "threading import Timer import numpy as np from openal.audio import SoundSource assets_root =", "0, 0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array):", "0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return", "sys from os.path import join from threading import Timer import numpy as np", "dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return vec_mag(a", "vec_mag(a - b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__() def play_sound(): sound =", "- b) class ContinuousSoundSource(SoundSource): def __init__(self, 
sound_generator): super().__init__() def play_sound(): sound = sound_generator()", "sound_generator): super().__init__() def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound),", "return np.array(values or (0, 0, 0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def", "np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return vec_mag(a - b) class", "import sys from os.path import join from threading import Timer import numpy as", "timer = Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound() @staticmethod def calc_length(sound): return", "def __init__(self, sound_generator): super().__init__() def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer", "import SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets', name)", "SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets', name) def", "__init__(self, sound_generator): super().__init__() def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer =", "def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound) timer.daemon", "os.path import join from threading import Timer import numpy as np from openal.audio", "b: np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__() def", "super().__init__() def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound)", "Timer import numpy as np from openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS',", "= sound_generator() self.queue(sound) self.__dict__['timer'] = timer = 
Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start()", "play_sound) timer.daemon = True timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size / (sound.frequency", "timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size / (sound.frequency * sound.bitrate / 8)", "= timer = Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound() @staticmethod def calc_length(sound):", "np.array(values or (0, 0, 0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a:", "self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound() @staticmethod", "from openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root,", "return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__() def play_sound(): sound", "b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__() def play_sound(): sound = sound_generator() self.queue(sound)", "class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__() def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer']", "def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return vec_mag(a -", "vec_dist(a: np.array, b: np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator):", "(0, 0, 0), dtype=float) def vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b:", "vec_mag(vec: np.array): return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return vec_mag(a - b)", "def vec_dist(a: np.array, b: np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def 
__init__(self,", "np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__() def play_sound():", "from threading import Timer import numpy as np from openal.audio import SoundSource assets_root", "np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def", "import Timer import numpy as np from openal.audio import SoundSource assets_root = getattr(sys,", "import join from threading import Timer import numpy as np from openal.audio import", "join(assets_root, 'assets', name) def new_pt(*values): return np.array(values or (0, 0, 0), dtype=float) def", "def new_pt(*values): return np.array(values or (0, 0, 0), dtype=float) def vec_mag(vec: np.array): return", "import numpy as np from openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS', '.')", "join from threading import Timer import numpy as np from openal.audio import SoundSource", "getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values): return np.array(values", "return join(assets_root, 'assets', name) def new_pt(*values): return np.array(values or (0, 0, 0), dtype=float)", "play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound) timer.daemon =", "sound_generator() self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound()", "np from openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name): return", "get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values): return np.array(values or (0, 0, 0),", "Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size /", "ContinuousSoundSource(SoundSource): def 
__init__(self, sound_generator): super().__init__() def play_sound(): sound = sound_generator() self.queue(sound) self.__dict__['timer'] =", "np.array, b: np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource): def __init__(self, sound_generator): super().__init__()", "openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name): return join(assets_root, 'assets',", "= Timer(self.calc_length(sound), play_sound) timer.daemon = True timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size", "def get_sfx(name): return join(assets_root, 'assets', name) def new_pt(*values): return np.array(values or (0, 0,", "timer.daemon = True timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size / (sound.frequency *", "numpy as np from openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def", "return np.sqrt(vec.dot(vec)) def vec_dist(a: np.array, b: np.array): return vec_mag(a - b) class ContinuousSoundSource(SoundSource):", "sound = sound_generator() self.queue(sound) self.__dict__['timer'] = timer = Timer(self.calc_length(sound), play_sound) timer.daemon = True", "as np from openal.audio import SoundSource assets_root = getattr(sys, '_MEIPASS', '.') def get_sfx(name):", "from os.path import join from threading import Timer import numpy as np from", "= True timer.start() play_sound() @staticmethod def calc_length(sound): return sound.size / (sound.frequency * sound.bitrate" ]
[ "(str): Brand owner for the food require_all_words (bool): When True, the search will", "d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class", "__init__(self, _dict_: dict = None, **kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_) return", "field page_number (int): The page of results to return sort_field (SortField): The name", ") def __init__(self, _dict_: dict = None, **kwargs): if _dict_ is not None:", "= 'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE =", "types to include in search ingredients: The list of ingredients (as it appears", "https://fdc.nal.usda.gov/api-guide.html#food-search-endpoint \"\"\" from typing import Dict, Union from datatrans import utils from datatrans.utils.classes", "The page of results to return sort_field (SortField): The name of the field", "__slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ =", "= 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId' class SortDirection(Enum):", "the food require_all_words (bool): When True, the search will only return foods contain", "field by which to sort sort_direction (SortDirection): The direction of the sorting \"\"\"", "sort_direction (SortDirection): The direction of the sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types',", "if _dict_ is not None: super().__init__(_dict_=_dict_) return for k, v in kwargs.items(): if", "verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection),", "search field page_number (int): The 
page of results to return sort_field (SortField): The", "LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE", "import utils from datatrans.utils.classes import JSONEnum as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection',", "bool]): Specific data types to include in search ingredients: The list of ingredients", "'fdcId' class SortDirection(Enum): ASC = 'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str],", "return for k, v in kwargs.items(): if k in self.__slots__: kwargs[utils.snake_to_camel(k)] = kwargs.pop(k)", "= 'fdcId' class SortDirection(Enum): ASC = 'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType,", "_dict_: dict = None, **kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_) return for", "food require_all_words (bool): When True, the search will only return foods contain all", "the search will only return foods contain all of the words that were", "False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData", "k, v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value:", "dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction',", "super().__init__(_dict_=_dict_) return for k, v in kwargs.items(): if k in self.__slots__: kwargs[utils.snake_to_camel(k)] =", "(general text) included_data_types (Dict[str, bool]): Specific data types to include in search ingredients:", "JSONEnum as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION =", "Central 
search criteria. Attributes: general_search_input (str): Search query (general text) included_data_types (Dict[str, bool]):", "( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input',", "d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria. Attributes: general_search_input", "FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents", "FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria. Attributes: general_search_input (str): Search query (general", "require_all_words (bool): When True, the search will only return foods contain all of", "('sort_direction', SortDirection), ) def __init__(self, _dict_: dict = None, **kwargs): if _dict_ is", "= 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION", "\"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__", "'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types),", "dict = None, **kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_) return for k,", "(SortField): The name of the field by which to sort sort_direction (SortDirection): The", "from datatrans import utils from datatrans.utils.classes import JSONEnum as Enum __all__ = ['FoodDataType',", "def __init__(self, _dict_: dict = None, **kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_)", "sort sort_direction (SortDirection): The 
direction of the sorting \"\"\" __slots__ = ( 'general_search_input',", "('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), )", "FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY = 'SR", "datatrans.utils.classes import JSONEnum as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum):", "entered in the search field page_number (int): The page of results to return", "criteria. Attributes: general_search_input (str): Search query (general text) included_data_types (Dict[str, bool]): Specific data", "('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField),", "FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), }", "all of the words that were entered in the search field page_number (int):", "DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId' class", "SortDirection(Enum): ASC = 'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d", "return sort_field (SortField): The name of the field by which to sort sort_direction", "'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v for k, v", "text) included_data_types (Dict[str, bool]): Specific data types to include in search ingredients: The", "(FNDDS)' BRANDED = 'Branded' LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword'", "brand_owner (str): Brand owner for the food require_all_words (bool): When True, the search", 
"['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)'", "(int): The page of results to return sort_field (SortField): The name of the", "('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int),", "= 'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC = 'asc' DESC = 'desc'", "import Dict, Union from datatrans import utils from datatrans.utils.classes import JSONEnum as Enum", "sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction')", "= 'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k):", "(Dict[str, bool]): Specific data types to include in search ingredients: The list of", "the product label) brand_owner (str): Brand owner for the food require_all_words (bool): When", "The list of ingredients (as it appears on the product label) brand_owner (str):", "were entered in the search field page_number (int): The page of results to", "(str): Search query (general text) included_data_types (Dict[str, bool]): Specific data types to include", "('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self,", "'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients',", "owner for the food require_all_words (bool): When True, the search will only return", "\"\"\"Represents a FoodData Central search criteria. 
Attributes: general_search_input (str): Search query (general text)", "FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY =", "__all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY =", "page_number (int): The page of results to return sort_field (SortField): The name of", "'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str),", "d = {FoodDataType(k): v for k, v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION,", "int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_: dict = None, **kwargs):", "'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY = 'SR Legacy' class", "class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY", "} class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria. 
Attributes: general_search_input (str): Search", "(SortDirection): The direction of the sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients',", "'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner',", "def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v for k, v in", "for k, v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False),", "'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED =", "v for k, v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY,", "list of ingredients (as it appears on the product label) brand_owner (str): Brand", "as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation'", "= None, **kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_) return for k, v", "for k, v in kwargs.items(): if k in self.__slots__: kwargs[utils.snake_to_camel(k)] = kwargs.pop(k) super().__init__(_dict_=kwargs)", "from typing import Dict, Union from datatrans import utils from datatrans.utils.classes import JSONEnum", "d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search", "name of the field by which to sort sort_direction (SortDirection): The direction of", "will only return foods contain all of the words that were entered in", "\"\"\" from typing import Dict, Union from datatrans import utils from datatrans.utils.classes import", "Search query (general text) included_data_types 
(Dict[str, bool]): Specific data types to include in", "__attr__ = ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words',", "= 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY = 'SR Legacy'", "The name of the field by which to sort sort_direction (SortDirection): The direction", "ingredients (as it appears on the product label) brand_owner (str): Brand owner for", "= 'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC = 'asc'", "direction of the sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words',", "None, **kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_) return for k, v in", "SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded' LEGACY = 'SR Legacy' class SortField(Enum):", "'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v", "'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED", "bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_: dict =", "'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC = 'asc' DESC = 'desc' def", "on the product label) brand_owner (str): Brand owner for the food require_all_words (bool):", "Dict, Union from datatrans import utils from datatrans.utils.classes import JSONEnum as Enum __all__", "in the search field page_number (int): The page of results to return sort_field", "search criteria. 
Attributes: general_search_input (str): Search query (general text) included_data_types (Dict[str, bool]): Specific", "verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v for k, v in d.items()}", "class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID =", "by which to sort sort_direction (SortDirection): The direction of the sorting \"\"\" __slots__", "\"\"\" References: https://fdc.nal.usda.gov/api-guide.html#food-search-endpoint \"\"\" from typing import Dict, Union from datatrans import utils", "= 'Branded' LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE =", "SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_: dict = None, **kwargs): if _dict_", "When True, the search will only return foods contain all of the words", "search will only return foods contain all of the words that were entered", "of results to return sort_field (SortField): The name of the field by which", "'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types', dict,", "References: https://fdc.nal.usda.gov/api-guide.html#food-search-endpoint \"\"\" from typing import Dict, Union from datatrans import utils from", "{ FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False),", "Brand owner for the food require_all_words (bool): When True, the search will only", "Union from datatrans import utils from datatrans.utils.classes import JSONEnum as Enum __all__ =", "foods contain all of the words that were entered in the search field", "False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class 
FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria.", "'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey (FNDDS)' BRANDED = 'Branded'", "FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central", "general_search_input (str): Search query (general text) included_data_types (Dict[str, bool]): Specific data types to", "v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED,", "page of results to return sort_field (SortField): The name of the field by", "not None: super().__init__(_dict_=_dict_) return for k, v in kwargs.items(): if k in self.__slots__:", "class SortDirection(Enum): ASC = 'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]):", "ASC = 'asc' DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d =", "data types to include in search ingredients: The list of ingredients (as it", "results to return sort_field (SortField): The name of the field by which to", "Attributes: general_search_input (str): Search query (general text) included_data_types (Dict[str, bool]): Specific data types", "the sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field',", "'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types',", "return foods contain all of the words that were entered in the search", "None: super().__init__(_dict_=_dict_) return for k, v in kwargs.items(): if k in self.__slots__: 
kwargs[utils.snake_to_camel(k)]", "query (general text) included_data_types (Dict[str, bool]): Specific data types to include in search", "product label) brand_owner (str): Brand owner for the food require_all_words (bool): When True,", "of the sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number',", "= ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool),", "contain all of the words that were entered in the search field page_number", "str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field',", "(as it appears on the product label) brand_owner (str): Brand owner for the", "BRANDED = 'Branded' LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE", "appears on the product label) brand_owner (str): Brand owner for the food require_all_words", "in search ingredients: The list of ingredients (as it appears on the product", "( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str), ('require_all_words', bool), ('page_number',", "bool]): d = {FoodDataType(k): v for k, v in d.items()} return { FoodDataType.FOUNDATION.value:", "'Survey (FNDDS)' BRANDED = 'Branded' LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION =", "which to sort sort_direction (SortDirection): The direction of the sorting \"\"\" __slots__ =", "<reponame>KooCook/datatrans \"\"\" References: https://fdc.nal.usda.gov/api-guide.html#food-search-endpoint \"\"\" from typing import Dict, Union from datatrans import", "{FoodDataType(k): v for k, v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value:", "label) 
brand_owner (str): Brand owner for the food require_all_words (bool): When True, the", "('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_: dict = None,", "the words that were entered in the search field page_number (int): The page", "SortDirection), ) def __init__(self, _dict_: dict = None, **kwargs): if _dict_ is not", "'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = ( ('general_search_input', str),", "Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY", "only return foods contain all of the words that were entered in the", "to include in search ingredients: The list of ingredients (as it appears on", "for the food require_all_words (bool): When True, the search will only return foods", "= ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION = 'Foundation' SURVEY = 'Survey", "of ingredients (as it appears on the product label) brand_owner (str): Brand owner", "in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False),", "_dict_ is not None: super().__init__(_dict_=_dict_) return for k, v in kwargs.items(): if k", "('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_: dict = None, **kwargs): if", "= 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v for k,", "words that were entered in the search field page_number (int): The page of", "False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: 
d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass):", "return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY,", "typing import Dict, Union from datatrans import utils from datatrans.utils.classes import JSONEnum as", "PUBDATE = 'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC = 'asc' DESC =", "('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_: dict", "False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria. Attributes: general_search_input (str):", "utils from datatrans.utils.classes import JSONEnum as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria']", "FoodData Central search criteria. 
Attributes: general_search_input (str): Search query (general text) included_data_types (Dict[str,", "ingredients: The list of ingredients (as it appears on the product label) brand_owner", "= ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner', 'require_all_words', 'page_number', 'sort_field', 'sort_direction') __attr__ = (", "the search field page_number (int): The page of results to return sort_field (SortField):", "DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC =", "included_data_types (Dict[str, bool]): Specific data types to include in search ingredients: The list", "it appears on the product label) brand_owner (str): Brand owner for the food", "DESC = 'desc' def verify_included_data_types(d: Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v for", "FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria. Attributes:", "sort_field (SortField): The name of the field by which to sort sort_direction (SortDirection):", "str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def __init__(self, _dict_:", "to return sort_field (SortField): The name of the field by which to sort", "'Branded' LEGACY = 'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword'", "class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a FoodData Central search criteria. 
Attributes: general_search_input (str): Search query", "(bool): When True, the search will only return foods contain all of the", "Specific data types to include in search ingredients: The list of ingredients (as", "import JSONEnum as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class FoodDataType(Enum): FOUNDATION", "= {FoodDataType(k): v for k, v in d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False),", "True, the search will only return foods contain all of the words that", "of the words that were entered in the search field page_number (int): The", "Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID", "Dict[Union[FoodDataType, str], bool]): d = {FoodDataType(k): v for k, v in d.items()} return", "that were entered in the search field page_number (int): The page of results", "str], bool]): d = {FoodDataType(k): v for k, v in d.items()} return {", "'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC", "d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value: d.pop(FoodDataType.LEGACY, False), } class FoodSearchCriteria(utils.DataClass): \"\"\"Represents a", "a FoodData Central search criteria. 
Attributes: general_search_input (str): Search query (general text) included_data_types", "datatrans import utils from datatrans.utils.classes import JSONEnum as Enum __all__ = ['FoodDataType', 'SortField',", "'sort_direction') __attr__ = ( ('general_search_input', str), ('included_data_types', dict, verify_included_data_types), ('ingredients', str), ('brand_owner', str),", "d.items()} return { FoodDataType.FOUNDATION.value: d.pop(FoodDataType.FOUNDATION, False), FoodDataType.SURVEY.value: d.pop(FoodDataType.SURVEY, False), FoodDataType.BRANDED.value: d.pop(FoodDataType.BRANDED, False), FoodDataType.LEGACY.value:", "is not None: super().__init__(_dict_=_dict_) return for k, v in kwargs.items(): if k in", "SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId'", "'SR Legacy' class SortField(Enum): DESCRIPTION = 'lowercaseDescription.keyword' DATATYPE = 'dataType.keyword' PUBDATE = 'publishedDate'", "the field by which to sort sort_direction (SortDirection): The direction of the sorting", "include in search ingredients: The list of ingredients (as it appears on the", "ID = 'fdcId' class SortDirection(Enum): ASC = 'asc' DESC = 'desc' def verify_included_data_types(d:", "search ingredients: The list of ingredients (as it appears on the product label)", "'dataType.keyword' PUBDATE = 'publishedDate' ID = 'fdcId' class SortDirection(Enum): ASC = 'asc' DESC", "**kwargs): if _dict_ is not None: super().__init__(_dict_=_dict_) return for k, v in kwargs.items():", "str), ('brand_owner', str), ('require_all_words', bool), ('page_number', int), ('sort_field', SortField), ('sort_direction', SortDirection), ) def", "The direction of the sorting \"\"\" __slots__ = ( 'general_search_input', 'included_data_types', 'ingredients', 'brand_owner',", "from datatrans.utils.classes import JSONEnum as Enum __all__ = ['FoodDataType', 'SortField', 'SortDirection', 'FoodSearchCriteria'] class", "of the field by which 
to sort sort_direction (SortDirection): The direction of the", "to sort sort_direction (SortDirection): The direction of the sorting \"\"\" __slots__ = (" ]
[ "cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3])", "H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1", "= cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8,", "= cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1", "moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10", "= cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 =", "= cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 =", "X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3", "moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4,", "i in range(length) for j in range(length)] print(qubits) circuit = cirq.Circuit() H1 =", "= cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 =", "cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2])", "GridQubit # creating circuit with 5 qubit length = 5 qubits = [cirq.GridQubit(i,", "= cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 =", "= cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation", "S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2])", "= cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5]) 
moment6 = cirq.Moment([C1]) moment7 =", "moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11", "j) for i in range(length) for j in range(length)] print(qubits) circuit = cirq.Circuit()", "cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3,", "= cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 =", "qubit length = 5 qubits = [cirq.GridQubit(i, j) for i in range(length) for", "cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1])", "= cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1])", "= cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4", "cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3])", "5 qubits = [cirq.GridQubit(i, j) for i in range(length) for j in range(length)]", "= cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 =", "moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7,", "= cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9, moment10, moment11, moment12,", "cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4])", "C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 =", "moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13", "Circuit from cirq.devices import GridQubit # creating circuit with 5 qubit 
length =", "H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4", "= cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 =", "cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4])", "cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 =", "import GridQubit # creating circuit with 5 qubit length = 5 qubits =", "circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4", "= cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2,", "H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2", "cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1", "= cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit", "cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5])", "X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4", "moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12", "= 5 qubits = [cirq.GridQubit(i, j) for i in range(length) for j in", "H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5", "moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14", "cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 
= cirq.X(qubits[3]) X5 = cirq.X(qubits[4])", "cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1])", "cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2])", "cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9, moment10, moment11, moment12, moment13,", "cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4])", "for i in range(length) for j in range(length)] print(qubits) circuit = cirq.Circuit() H1", "moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7", "cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9,", "moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8", "= cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 =", "cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit =", "= cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 =", "qubits = [cirq.GridQubit(i, j) for i in range(length) for j in range(length)] print(qubits)", "cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6", "#circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9, moment10,", "#Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3])", "moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = 
cirq.Moment([H5]) moment6", "moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9, moment10, moment11, moment12, moment13, moment14)) print(circuit)", "= cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 =", "cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 = cirq.Moment([X1]) moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3])", "= cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 =", "import cirq import numpy as np from cirq import Circuit from cirq.devices import", "circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9, moment10, moment11,", "cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3", "cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4])", "cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 = cirq.Moment([H2])", "moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1,", "circuit with 5 qubit length = 5 qubits = [cirq.GridQubit(i, j) for i", "cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3])", "= cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 =", "C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap", "cirq.devices import GridQubit # creating circuit with 5 qubit length = 5 qubits", "H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2]) 
C3", "j in range(length)] print(qubits) circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1])", "cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3])", "= [cirq.GridQubit(i, j) for i in range(length) for j in range(length)] print(qubits) circuit", "from cirq import Circuit from cirq.devices import GridQubit # creating circuit with 5", "moment2, moment3, moment4, moment5 ,moment6 ,moment7, moment8, moment9, moment10, moment11, moment12, moment13, moment14))", "in range(length)] print(qubits) circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3", "= cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 =", "range(length) for j in range(length)] print(qubits) circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2", "[cirq.GridQubit(i, j) for i in range(length) for j in range(length)] print(qubits) circuit =", "cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1])", "import Circuit from cirq.devices import GridQubit # creating circuit with 5 qubit length", "for j in range(length)] print(qubits) circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2 =", "from cirq.devices import GridQubit # creating circuit with 5 qubit length = 5", "C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 =", "cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 =", "X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5", "cirq import Circuit from cirq.devices import GridQubit # creating circuit with 5 qubit", "= cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0])", "moment1 = cirq.Moment([H1]) moment2 = 
cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5", "= cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 =", "cirq.Moment([H1]) moment2 = cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5])", "= cirq.Moment([H2]) moment3 = cirq.Moment([H3]) moment4 = cirq.Moment([H4]) moment5 = cirq.Moment([H5]) moment6 =", "X2 = cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1", "X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2", "= cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9 = cirq.Moment([S1]) moment10 =", "numpy as np from cirq import Circuit from cirq.devices import GridQubit # creating", "cirq.X(qubits[1]) X3 = cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1])", "with 5 qubit length = 5 qubits = [cirq.GridQubit(i, j) for i in", "= cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 =", "cirq import numpy as np from cirq import Circuit from cirq.devices import GridQubit", "C2 = cirq.CNOT(qubits[1],qubits[2]) C3 = cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4])", "import numpy as np from cirq import Circuit from cirq.devices import GridQubit #", "#swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2 = cirq.X(qubits[1]) X3 =", "= cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit circuit = cirq.Circuit((moment1, moment2, moment3, moment4, moment5", "# creating circuit with 5 qubit length = 5 qubits = [cirq.GridQubit(i, j)", "np from cirq import Circuit from cirq.devices import GridQubit # creating circuit with", "length = 5 qubits = [cirq.GridQubit(i, j) for i in range(length) for j", "in range(length) for j in range(length)] print(qubits) circuit = 
cirq.Circuit() H1 = cirq.H(qubits[0])", "cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1]) C2 = cirq.CNOT(qubits[1],qubits[2])", "creating circuit with 5 qubit length = 5 qubits = [cirq.GridQubit(i, j) for", "= cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 =", "moment11 = cirq.Moment([X2]) moment12 = cirq.Moment([X3]) moment13 = cirq.Moment([X4]) moment14 = cirq.Moment([X5]) #circuit", "= cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 =", "cirq.H(qubits[1]) H3 = cirq.H(qubits[2]) H4 = cirq.H(qubits[3]) H5 = cirq.H(qubits[4]) C1 = cirq.CNOT(qubits[0],qubits[1])", "= cirq.X(qubits[2]) X4 = cirq.X(qubits[3]) X5 = cirq.X(qubits[4]) moment1 = cirq.Moment([H1]) moment2 =", "as np from cirq import Circuit from cirq.devices import GridQubit # creating circuit", "range(length)] print(qubits) circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 =", "print(qubits) circuit = cirq.Circuit() H1 = cirq.H(qubits[0]) H2 = cirq.H(qubits[1]) H3 = cirq.H(qubits[2])", "5 qubit length = 5 qubits = [cirq.GridQubit(i, j) for i in range(length)", "cirq.CNOT(qubits[2],qubits[3]) C4 = cirq.CNOT(qubits[3],qubits[4]) #swap S1 = cirq.SWAP(qubits[0],qubits[4]) #Rotation X1 = cirq.X(qubits[0]) X2", "moment5 = cirq.Moment([H5]) moment6 = cirq.Moment([C1]) moment7 = cirq.Moment([C2]) moment8 = cirq.Moment([C3]) moment9" ]
[ "\"\") # print('result_dic44444', result_dic) return result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\"", "(val, param_name) in args: if val is not None: items.append(val) elif hasattr(context, param_name):", "match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else: result_dic[def_key] = group_1", "print('result_dic44444', result_dic) return result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the", "-*- \"\"\" dsl helper \"\"\" import re import flybirds.utils.flybirds_log as log # generate", "] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else: result_dic[def_key] = group_1 else:", "log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get must and optional parameters \"\"\"", "return result def get_params(context, *args): \"\"\" Get param from context :param context: step", "\"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? 
.?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode,", "# generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj = re.match(functin_pattern,", "result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args):", "result_dic = {} match_obj = re.match(functin_pattern, dsl_params) if match_obj is not None: \"\"\"", "group_1) if match_obj_group_1 is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[", "\"\") return result def get_params(context, *args): \"\"\" Get param from context :param context:", "split_must_param(dsl_params): \"\"\" Get must and optional parameters \"\"\" result = dsl_params.split(\",\", 1) result[0]", "context :param context: step context :param args: A tuple containing value and parameter", "and parameter name :return: \"\"\" items = [] for (val, param_name) in args:", "use in subsequent processes \"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params,", "Convert the parameters in the dsl statement into dict format for use in", "def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get must and optional parameters", "1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context,", "return result_dic def split_must_param(dsl_params): \"\"\" Get must and optional parameters \"\"\" result =", "= result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args): \"\"\"", "result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else: 
result_dic[def_key] =", "result_dic def split_must_param(dsl_params): \"\"\" Get must and optional parameters \"\"\" result = dsl_params.split(\",\",", "( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1", "result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args): \"\"\" Get param from context :param", "items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items def return_value(value, def_value=None): \"\"\" get", "return items def return_value(value, def_value=None): \"\"\" get global attribute value \"\"\" if value", "re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic", "param_name): items.append(getattr(context, param_name)) return items def return_value(value, def_value=None): \"\"\" get global attribute value", "dsl_params) if match_obj is not None: \"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? .?economic.?,", "= re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return", "{} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic:", "def_key): result_dic = {} match_obj = re.match(functin_pattern, dsl_params) if match_obj is not None:", "verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? 
.?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match", "result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1 is not None: match_obj_group_1", "else: result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return", "generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj = re.match(functin_pattern, dsl_params)", "still met, split again, Until the split to the last item: text= \"\"\"", "match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are still met, split again,", "None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is not None: group_1 = (", "properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15)", "optional parameters \"\"\" result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] =", "[] for (val, param_name) in args: if val is not None: items.append(val) elif", "into dict format for use in subsequent processes \"\"\" result_dic = {} functin_pattern", "flybirds.utils.flybirds_log as log # generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {}", "again, Until the split to the last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\",", "format for use in subsequent processes \"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\")", "to the last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3)", "match_obj.group(3) match_obj_group_1 = 
re.match(functin_pattern, group_1) while match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern,", "processes \"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic =", "if val is not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items", "def split_must_param(dsl_params): \"\"\" Get must and optional parameters \"\"\" result = dsl_params.split(\",\", 1)", "params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in the dsl statement into dict format", "= re.match(functin_pattern, group_1) if match_obj_group_1 is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\")", "the dsl statement into dict format for use in subsequent processes \"\"\" result_dic", "*args): \"\"\" Get param from context :param context: step context :param args: A", "as log # generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj", "= match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1 is not None: match_obj_group_1 =", "= add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get must", "conditions are still met, split again, Until the split to the last item:", "None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items def return_value(value, def_value=None): \"\"\"", "\"\"\" dsl helper \"\"\" import re import flybirds.utils.flybirds_log as log # generate result_dic", "statement into dict format for use in subsequent processes \"\"\" result_dic = {}", "from back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are", "tuple containing value and parameter name :return: 
\"\"\" items = [] for (val,", "= group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic #", "not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items def return_value(value, def_value=None):", "helper \"\"\" import re import flybirds.utils.flybirds_log as log # generate result_dic def add_res_dic(dsl_params,", "re.match(functin_pattern, group_1) while match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1", "match_obj_group_1 is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ]", "# -*- coding: utf-8 -*- \"\"\" dsl helper \"\"\" import re import flybirds.utils.flybirds_log", "add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get must and", "\"\"\" result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\")", "\"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params,", "param_name) in args: if val is not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context,", "= match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1 is", "result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get", "text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to 
front, match", "Match from back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions", "val is not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items def", "<reponame>LinuxSuRen/flybirds<filename>flybirds/utils/dsl_helper.py # -*- coding: utf-8 -*- \"\"\" dsl helper \"\"\" import re import", "the parameters in the dsl statement into dict format for use in subsequent", "else: result_dic[def_key] = group_1 break else: result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\",", "functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get must and optional", "split again, Until the split to the last item: text= \"\"\" group_1 =", "= {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key)", "= dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result", "utf-8 -*- \"\"\" dsl helper \"\"\" import re import flybirds.utils.flybirds_log as log #", "= ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] =", "None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else:", "= result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args): \"\"\" Get param from context", "import flybirds.utils.flybirds_log as log # generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic =", "if match_obj is not None: \"\"\" senario: Flight, verifyEle=center_content_layout, 
verifyAction=position textMatches=shanghai.? .?economic.?, fuzzyMatch=true", "met, split again, Until the split to the last item: text= \"\"\" group_1", "= match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else: result_dic[def_key] = group_1 else: result_dic[def_key]", "\"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1 is not None:", "def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj = re.match(functin_pattern, dsl_params) if match_obj", "functin_pattern, def_key): result_dic = {} match_obj = re.match(functin_pattern, dsl_params) if match_obj is not", "textMatches=shanghai.? .?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back", "match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break", "result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in the", "result_dic[def_key] = group_1 break else: result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\")", "last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 =", "not None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is not None: group_1 =", "# print('result_dic44444', result_dic) return result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert", "for use in subsequent processes \"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if", "match_obj_group_1(text=freshmode, timeout=15) f the conditions are still met, split again, Until 
the split", "containing value and parameter name :return: \"\"\" items = [] for (val, param_name)", "functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic))", ":param args: A tuple containing value and parameter name :return: \"\"\" items =", "\"\"\" Get param from context :param context: step context :param args: A tuple", "step context :param args: A tuple containing value and parameter name :return: \"\"\"", "group_1) while match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is", "\"\"\" Convert the parameters in the dsl statement into dict format for use", "def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in the dsl statement into dict", "name :return: \"\"\" items = [] for (val, param_name) in args: if val", "not None: \"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? 
.?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40", "back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are still", "elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items def return_value(value, def_value=None): \"\"\" get global", "param_name)) return items def return_value(value, def_value=None): \"\"\" get global attribute value \"\"\" if", "re import flybirds.utils.flybirds_log as log # generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic", ":param context: step context :param args: A tuple containing value and parameter name", "\"\"\" items = [] for (val, param_name) in args: if val is not", "result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj = re.match(functin_pattern, dsl_params) if", "{} match_obj = re.match(functin_pattern, dsl_params) if match_obj is not None: \"\"\" senario: Flight,", "text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1)", "multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode,", "fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to front,", "match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern, group_1)", "\"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args): \"\"\" Get param", "group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic # generate", "group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", 
\"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1", "first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are still met, split again, Until the", "to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are still met,", "= group_1 break else: result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") #", "\"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else:", "dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"):", "if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def", ") result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else: result_dic[def_key]", "if match_obj_group_1 is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2)", "A tuple containing value and parameter name :return: \"\"\" items = [] for", "get global attribute value \"\"\" if value is not None: return value return", "result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic # generate result_dic def", "swipeCount=40 Match from back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the", "= dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic # generate result_dic def params_to_dic(dsl_params,", "re.match(functin_pattern, dsl_params) if match_obj is not None: \"\"\" senario: Flight, 
verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.?", "= re.match(functin_pattern, group_1) while match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern, group_1) if", "result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in the dsl statement into", "timeout=15, swipeCount=40 Match from back to front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f", "get_params(context, *args): \"\"\" Get param from context :param context: step context :param args:", "generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in the dsl statement", "\"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while", "timeout=15) f the conditions are still met, split again, Until the split to", "return result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in", "result def get_params(context, *args): \"\"\" Get param from context :param context: step context", "the split to the last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)]", "coding: utf-8 -*- \"\"\" dsl helper \"\"\" import re import flybirds.utils.flybirds_log as log", "dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def", "from context :param context: step context :param args: A tuple containing value and", "timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to front, match back", "value and parameter name :return: \"\"\" items = [] for (val, param_name) in", ".?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi 
properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to", "Get must and optional parameters \"\"\" result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\",", "isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params):", "is not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return items def return_value(value,", "param from context :param context: step context :param args: A tuple containing value", "= [] for (val, param_name) in args: if val is not None: items.append(val)", "are still met, split again, Until the split to the last item: text=", "str): result_dic = add_res_dic(dsl_params, functin_pattern, def_key) log.info(\"result_dic: {}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\"", ":return: \"\"\" items = [] for (val, param_name) in args: if val is", "subsequent processes \"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic", "dsl helper \"\"\" import re import flybirds.utils.flybirds_log as log # generate result_dic def", "Get param from context :param context: step context :param args: A tuple containing", "not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3)", "result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return", "is not None: \"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? 
.?economic.?, fuzzyMatch=true text=freshmode, timeout=15,", "parameters in the dsl statement into dict format for use in subsequent processes", "items def return_value(value, def_value=None): \"\"\" get global attribute value \"\"\" if value is", "the conditions are still met, split again, Until the split to the last", "def_key=\"selector\"): \"\"\" Convert the parameters in the dsl statement into dict format for", "log # generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj =", "item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern,", "must and optional parameters \"\"\" result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\")", "verifyAction=position textMatches=shanghai.? .?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from", "result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args): \"\"\" Get param from", "def return_value(value, def_value=None): \"\"\" get global attribute value \"\"\" if value is not", "# generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters in the dsl", "result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\", \"\") return result def get_params(context, *args): \"\"\" Get", "\"\"\" Get must and optional parameters \"\"\" result = dsl_params.split(\",\", 1) result[0] =", "Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? 
.?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40", "parameter name :return: \"\"\" items = [] for (val, param_name) in args: if", "add_res_dic(dsl_params, functin_pattern, def_key): result_dic = {} match_obj = re.match(functin_pattern, dsl_params) if match_obj is", "\"\"\" import re import flybirds.utils.flybirds_log as log # generate result_dic def add_res_dic(dsl_params, functin_pattern,", "for (val, param_name) in args: if val is not None: items.append(val) elif hasattr(context,", "-*- coding: utf-8 -*- \"\"\" dsl helper \"\"\" import re import flybirds.utils.flybirds_log as", "items = [] for (val, param_name) in args: if val is not None:", "def get_params(context, *args): \"\"\" Get param from context :param context: step context :param", "match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\",", "items.append(getattr(context, param_name)) return items def return_value(value, def_value=None): \"\"\" get global attribute value \"\"\"", "while match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is not", "else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic # generate result_dic", "dict format for use in subsequent processes \"\"\" result_dic = {} functin_pattern =", "in the dsl statement into dict format for use in subsequent processes \"\"\"", "import re import flybirds.utils.flybirds_log as log # generate result_dic def add_res_dic(dsl_params, functin_pattern, def_key):", "front, match back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are still met, split", "is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] =", "context 
:param args: A tuple containing value and parameter name :return: \"\"\" items", "Until the split to the last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\")", "the last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1", "args: if val is not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name)) return", "{}\".format(result_dic)) return result_dic def split_must_param(dsl_params): \"\"\" Get must and optional parameters \"\"\" result", "hasattr(context, param_name): items.append(getattr(context, param_name)) return items def return_value(value, def_value=None): \"\"\" get global attribute", "in subsequent processes \"\"\" result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str):", "= {} match_obj = re.match(functin_pattern, dsl_params) if match_obj is not None: \"\"\" senario:", "split to the last item: text= \"\"\" group_1 = match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] =", "None: \"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? .?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi", "senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? 
.?economic.?, fuzzyMatch=true text=freshmode, timeout=15, swipeCount=40 multi properities,example:text=freshmode, timeout=15,", "args: A tuple containing value and parameter name :return: \"\"\" items = []", "match_obj_group_1 is not None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is not None:", "back first,swipeCount=40 match_obj_group_1(text=freshmode, timeout=15) f the conditions are still met, split again, Until", "match_obj is not None: \"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position textMatches=shanghai.? .?economic.?, fuzzyMatch=true text=freshmode,", "f the conditions are still met, split again, Until the split to the", "group_1 break else: result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444',", "is not None: match_obj_group_1 = re.match(functin_pattern, group_1) if match_obj_group_1 is not None: group_1", "match_obj.group(1).strip().replace(u\"\\u200b\", \"\") result_dic[match_obj.group(2)] = match_obj.group(3) match_obj_group_1 = re.match(functin_pattern, group_1) while match_obj_group_1 is not", "match_obj = re.match(functin_pattern, dsl_params) if match_obj is not None: \"\"\" senario: Flight, verifyEle=center_content_layout,", "result_dic = {} functin_pattern = re.compile(r\"([\\S\\s]+),\\s*([a-zA-Z0-9_]+)\\s*=\\s*(\\S+)\") if isinstance(dsl_params, str): result_dic = add_res_dic(dsl_params, functin_pattern,", "and optional parameters \"\"\" result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1]", "\"\"\" get global attribute value \"\"\" if value is not None: return value", "return_value(value, def_value=None): \"\"\" get global attribute value \"\"\" if value is not None:", "break else: result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic)", "dsl statement into dict format 
for use in subsequent processes \"\"\" result_dic =", "def_value=None): \"\"\" get global attribute value \"\"\" if value is not None: return", "context: step context :param args: A tuple containing value and parameter name :return:", "in args: if val is not None: items.append(val) elif hasattr(context, param_name): items.append(getattr(context, param_name))", "re.match(functin_pattern, group_1) if match_obj_group_1 is not None: group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") )", "group_1 = ( match_obj_group_1.group(1).strip().replace(u\"\\u200b\", \"\") ) result_dic[ match_obj_group_1.group(2) ] = match_obj_group_1.group(3) else: result_dic[def_key]", "swipeCount=40 multi properities,example:text=freshmode, timeout=15, swipeCount=40 Match from back to front, match back first,swipeCount=40", "parameters \"\"\" result = dsl_params.split(\",\", 1) result[0] = result[0].strip().replace(u\"\\u200b\", \"\") result[1] = result[1].strip().replace(u\"\\u200b\",", "match_obj_group_1.group(3) else: result_dic[def_key] = group_1 break else: result_dic[def_key] = group_1 else: result_dic[def_key] =", "result_dic[def_key] = group_1 else: result_dic[def_key] = dsl_params.strip().replace(u\"\\u200b\", \"\") # print('result_dic44444', result_dic) return result_dic", "= re.match(functin_pattern, dsl_params) if match_obj is not None: \"\"\" senario: Flight, verifyEle=center_content_layout, verifyAction=position", "result_dic) return result_dic # generate result_dic def params_to_dic(dsl_params, def_key=\"selector\"): \"\"\" Convert the parameters", "global attribute value \"\"\" if value is not None: return value return def_value" ]
[ "if x != y: e = e + 1 result.append((u,v)) union(G, x, y)", "if i > len(sortedges) - 1: return [] u,v = sortedges[i] i =", "e = e + 1 result.append((u,v)) union(G, x, y) # Else discard the", "List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in output: P = nx.Graph() for", "10): task.append((n, i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading, task) if __name__", "= e + 1 result.append((u,v)) union(G, x, y) # Else discard the edge", "task = large_index pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1] f = open(path,", "union(G, x, y) # Else discard the edge for i,j in result: cost", "print(\"MDT gen!\") result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() - start_time", "x = find(G, u) y = find(G ,v) # If including this edge", "len(output) < depth: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp)", "a copy of graph if len(normal_edges) == 0: return result sortedges = sorted(", "newcost < oldcost: P = deletenode(G,O) return P return P def starter(T,O): GraphArray", "V-1 while e < G.number_of_nodes() - 1: # Step 2: Pick the smallest", "cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading, task) if __name__ == \"__main__\": p_main()", "result[] for node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for u,", "= 0 for i in range(len(leaves)): if cnt < 3: G = T.copy()", "STs: if i < depth: i += 1 result += [starter(ST,G)] print(\"MDT gen!\")", "sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0])", "#t = time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O):", "key=lambda tree: average_pairwise_distance_fast(tree)) if 
len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray = newGraphArray[:3]", "List, MST) for edges in output: P = nx.Graph() for edge in edges:", "= small_index elif tt == 'medium': task = med_index elif tt == 'large':", "P2.copy() def KruskalMST(P): G = P.copy() cost = 0 normal_edges = [] #store", "i in range(len(leaves)): if cnt < 3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G):", "1: # Step 2: Pick the smallest edge and increment # the index", "solver.py def solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path)", "#print(result[0]) return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = [] P =", "for u, v in G.edges: G.edges[u, v]['property'] = 'normal' List = {G} G.graph['MST']", "[] for node in T.nodes: if T.degree[node] == 1: leaves += [node] leaves", "T.nodes: if T.degree[node] == 1: leaves += [node] leaves = sorted( leaves, key=lambda", "GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if T.degree[node]", "med_index + small_index elif tt == 'small': task = small_index elif tt ==", "# Number of edges to be taken is equal to V-1 while e", "leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0", "P3 = P1.copy() for u, v in P1.edges: if P1.edges[u, v]['property'] == 'excluded':", "result and increment the index # of result for next edge if x", "P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes()", "v)) # Step 1: Sort all the edges in non-decreasing # order of", "nx.Graph() P2 = nx.Graph() P1 = P.copy() P2 = P.copy() for u, v", "to change the # given graph, we can create a copy of graph", "= sortedges[i] i = i + 1 x = find(G, u) y =", "in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for u, v in G.edges:", "list(range(1, 304)) med_index 
= list(range(304, 607)) large_index = list(range(607, 1007)) if tt ==", "all the edges in non-decreasing # order of their # weight. If we", "P1.copy() for u, v in P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v)", "+= P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result return result def union(G,", "v]['property'] = 'normal' List = {G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if", "= find(G, u) y = find(G ,v) union(G, x, y) result.append((u, v)) e", "import sys import time import multiprocessing def solve(G, depth): \"\"\" Args: G: networkx.Graph", "#store all the normal edges result =[] #This will store the resultant MST", "used for sorted edges e = 0 # An index variable, used for", "STs = genST(G, depth) print(\"STs gen!\") i = 0 for ST in STs:", "is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: P = deletenode(G,O) return P", "G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for u, v in G.edges: if", "delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost =", "{tuple(G.graph['MST'])} if depth == -1: while len(MST) != 0: temp = min(List, key", ": G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def find(G, i): if G.nodes[i]['parent'] ==", "An index variable, used for result[] for node in G.nodes: G.nodes[node]['parent'] = node", "G.edges[u, v]['property'] = 'normal' List = {G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])}", "= multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1, 304)) med_index = list(range(304, 607))", "return delete3node(newGraphArray,O) def genST(G, depth): output = [] outgraphs = [] for u,", "key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while", "edge[1]) P.edges[edge[0], edge[1]]['weight'] = 
G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return outgraphs def Partition(P,", "= find(G ,v) union(G, x, y) result.append((u, v)) e += 1 elif G.edges[u,", "= read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T)))", "import read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time", "1: Sort all the edges in non-decreasing # order of their # weight.", "+= [G] if len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray) == 2: return", "def p_main(): path = sys.argv[1] f = open(path, 'r') lines = f.readlines() task", "find(G ,v) # If including this edge does't cause cycle, # include it", "= [] P = T.copy() for node in T.nodes: if T.degree[node] == 1:", "T) def main(): tt = sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task", "resultant MST i = 0 # An index variable, used for sorted edges", "G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost = average_pairwise_distance_fast(G) if newcost < oldcost:", "x) yroot = find(G, y) # Attach smaller rank tree under root of", "MST i = 0 # An index variable, used for sorted edges e", "x != y: e = e + 1 result.append((u,v)) union(G, x, y) #", "for sorted edges e = 0 # An index variable, used for result[]", "\"all\": task = large_index + med_index + small_index elif tt == 'small': task", "edge in ST.edges: weight += ST.edges[edge]['weight'] for ST in STs: if i <", "i < depth: i += 1 result += [starter(ST,G)] print(\"MDT gen!\") result =", "import networkx as nx from parse import read_input_file, write_output_file from Utility import is_valid_network,", "edge[1]]['weight'] outgraphs += [P] return outgraphs def Partition(P, List, MST): P1 = nx.Graph()", "sorted edges e = 0 # An index variable, used for result[] for", "e += 1 elif G.edges[u, 
v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u,", "(l, r) = line.split() (n, i) = l.split('-') i = int(i) print(n,i,r) if(int(r)", "while len(MST) != 0 and len(output) < depth: temp = min(List, key =", "[G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return", "in range(len(leaves)): if cnt < 3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt", "== 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy()", "u, v in P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3,", "'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1 is connected P3", "in STs: if i < depth: i += 1 result += [starter(ST,G)] print(\"MDT", "the index for next iteration if i > len(sortedges) - 1: return []", "'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1", "x, y) # Else discard the edge for i,j in result: cost +=", "of graph if len(normal_edges) == 0: return result sortedges = sorted( normal_edges, key=lambda", "= deletenode(G,O) return P return P def starter(T,O): GraphArray = [] oldcost =", "G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If", "from parse import read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys", "= genST(G, depth) print(\"STs gen!\") i = 0 for ST in STs: weight", "newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost", "+= 1 result += [starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda G: 
average_pairwise_distance_fast(G))", "newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output = [] outgraphs = [] for", "u, v in G.edges: G.edges[u, v]['property'] = 'normal' List = {G} G.graph['MST'] =", "average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O)", "< oldcost: GraphArray += [G] if len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray)", "= average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray += [G] if len(GraphArray) < 2:", "[] for line in lines: (l, r) = line.split() (n, i) = l.split('-')", "G.nodes[xroot]['rank'] += 1 def find(G, i): if G.nodes[i]['parent'] == i: return i return", "if newcost < oldcost: newGraphArray += [G] newGraphArray = sorted( newGraphArray, key=lambda tree:", "xroot # If ranks are same, then make one as root # and", "== 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1))", "== i: return i return find(G, G.nodes[i]['parent']) # Here's an example of how", "= [] oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if", "MSTP1 #check if P1 is connected P3 = P1.copy() for u, v in", "open(path, 'r') lines = f.readlines() task = [] for line in lines: (l,", "r) = line.split() (n, i) = l.split('-') i = int(i) print(n,i,r) if(int(r) >", "ST.edges[edge]['weight'] for ST in STs: if i < depth: i += 1 result", "= sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total time takes:%d\"%t)", "P def starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves = [] for", "return deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def", "i = 0 for ST in STs: weight = 0 for edge in", "connected P3 = P1.copy() for u, v in P1.edges: if 
P1.edges[u, v]['property'] ==", "G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def find(G, i): if G.nodes[i]['parent'] == i:", "min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for", "edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return outgraphs def Partition(P, List, MST):", "outgraphs def Partition(P, List, MST): P1 = nx.Graph() P2 = nx.Graph() P1 =", "Create V subsets with single elements # Number of edges to be taken", "If ranks are same, then make one as root # and increment its", "T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes:", "(list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for i in range(len(leaves)): if cnt <", "MST = {tuple(G.graph['MST'])} if depth == -1: while len(MST) != 0: temp =", "output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in output: P = nx.Graph()", "#check if P1 is connected P3 = P1.copy() for u, v in P1.edges:", "= 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1 is connected", "+= [P] return outgraphs def Partition(P, List, MST): P1 = nx.Graph() P2 =", "is_valid_network(O,G): cnt += 1 newcost = average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray +=", "with single elements # Number of edges to be taken is equal to", "index variable, used for result[] for node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank']", "1 result.append((u,v)) union(G, x, y) # Else discard the edge for i,j in", "rank tree (Union by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif", "G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost:", "and increment its rank by one else : G.nodes[yroot]['parent'] = xroot 
G.nodes[xroot]['rank'] +=", "an example of how to run your solver. # Usage: python3 solver.py def", "- 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G = P.copy() cost", "= average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if T.degree[node] == 1:", "solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def", "average_pairwise_distance_fast(G) if newcost < oldcost: P = deletenode(G,O) return P return P def", "= f.readlines() task = [] for line in lines: (l, r) = line.split()", "]['weight'],reverse=True) cnt = 0 for i in range(len(leaves)): if cnt < 3: G", "for node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for u, v", "i,j in result: cost += P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result", "]['weight'],reverse=True) for i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost =", "def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = [] P = T.copy() for node", "P1 = P2.copy() def KruskalMST(P): G = P.copy() cost = 0 normal_edges =", "List = {G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth == -1:", "= {tuple(G.graph['MST'])} if depth == -1: while len(MST) != 0: temp = min(List,", "find(G, G.nodes[i]['parent']) # Here's an example of how to run your solver. 
#", "given graph, we can create a copy of graph if len(normal_edges) == 0:", "all the normal edges result =[] #This will store the resultant MST i", "0 for i in range(len(leaves)): if cnt < 3: G = T.copy() G.remove_node(leaves[i])", "0 for ST in STs: weight = 0 for edge in ST.edges: weight", "= [] #store all the normal edges result =[] #This will store the", "= find(G ,v) # If including this edge does't cause cycle, # include", "def main(): tt = sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task =", "== 'small': task = small_index elif tt == 'medium': task = med_index elif", "= lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST)", "= [] small_index = list(range(1, 304)) med_index = list(range(304, 607)) large_index = list(range(607,", "g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST) != 0 and", "g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in output: P", "MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G = P.copy() cost = 0 normal_edges", "P2 = P.copy() for u, v in P.graph['MST']: if P.edges[u, v]['property'] == 'normal':", "+= [G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2:", "[] for u, v in G.edges: G.edges[u, v]['property'] = 'normal' List = {G}", "P2 = nx.Graph() P1 = P.copy() P2 = P.copy() for u, v in", "# Step 1: Sort all the edges in non-decreasing # order of their", "[] #store all the normal edges result =[] #This will store the resultant", "= list(range(607, 1007)) if tt == \"all\": task = large_index + med_index +", "ST in STs: weight = 0 for edge in ST.edges: weight += ST.edges[edge]['weight']", "rank by one else : 
G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def find(G,", "P return P def starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves =", "1007)) if tt == \"all\": task = large_index + med_index + small_index elif", "node) ]['weight'],reverse=True) for i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost", "tt == \"all\": task = large_index + med_index + small_index elif tt ==", "KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth == -1: while len(MST) != 0: temp", "newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output = []", "time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost =", "order of their # weight. If we are not allowed to change the", "path = sys.argv[1] f = open(path, 'r') lines = f.readlines() task = []", "= med_index elif tt == 'large': task = large_index pool.map(solver_multi_threading, task) def p_main():", "sortedges = sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create V subsets with single", "edge: G.edges[edge]['weight']) # Create V subsets with single elements # Number of edges", "in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost", "i): if G.nodes[i]['parent'] == i: return i return find(G, G.nodes[i]['parent']) # Here's an", "import time import multiprocessing def solve(G, depth): \"\"\" Args: G: networkx.Graph Returns: T:", "copy of graph if len(normal_edges) == 0: return result sortedges = sorted( normal_edges,", "while e < G.number_of_nodes() - 1: # Step 2: Pick the smallest edge", "are same, then make one as root # and increment its rank by", "If including this edge does't cause cycle, # include it in result and", "= l.split('-') i = int(i) print(n,i,r) if(int(r) > 10): task.append((n, i)) cores =", "for i,j in 
result: cost += P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] =", "oldcost: GraphArray += [G] if len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray) ==", "0 normal_edges = [] #store all the normal edges result =[] #This will", "smallest edge and increment # the index for next iteration if i >", "[] small_index = list(range(1, 304)) med_index = list(range(304, 607)) large_index = list(range(607, 1007))", "+= [node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt", "can create a copy of graph if len(normal_edges) == 0: return result sortedges", "== \"all\": task = large_index + med_index + small_index elif tt == 'small':", "G.nodes[yroot]['parent'] = xroot # If ranks are same, then make one as root", "= [] for line in lines: (l, r) = line.split() (n, i) =", "average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if T.degree[node] == 1: leaves", "line.split() (n, i) = l.split('-') i = int(i) print(n,i,r) if(int(r) > 10): task.append((n,", "while len(MST) != 0: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST'])", "starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves = [] for node in", "= int(i) print(n,i,r) if(int(r) > 10): task.append((n, i)) cores = multiprocessing.cpu_count() pool =", "= solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T)", "G.edges: G.edges[u, v]['property'] = 'normal' List = {G} G.graph['MST'] = KruskalMST(G) MST =", "range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost <", "result.append((u,v)) union(G, x, y) # Else discard the edge for i,j in result:", "G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost = 
average_pairwise_distance_fast(G) if", "== 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step", "1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path)) T =", "as root # and increment its rank by one else : G.nodes[yroot]['parent'] =", "< depth: i += 1 result += [starter(ST,G)] print(\"MDT gen!\") result = sorted(result,", "= newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray:", "gen!\") i = 0 for ST in STs: weight = 0 for edge", "outgraphs += [P] return outgraphs def Partition(P, List, MST): P1 = nx.Graph() P2", "high rank tree (Union by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot", "f.readlines() task = [] for line in lines: (l, r) = line.split() (n,", "def union(G, x, y): xroot = find(G, x) yroot = find(G, y) #", "tt == 'large': task = large_index pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1]", "is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time import multiprocessing def solve(G, depth): \"\"\"", "return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = [] P = T.copy()", "+= ST.edges[edge]['weight'] for ST in STs: if i < depth: i += 1", "normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step 1: Sort all the edges in", "their # weight. 
If we are not allowed to change the # given", "edge does't cause cycle, # include it in result and increment the index", "leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in", "MST) for edges in output: P = nx.Graph() for edge in edges: P.add_edge(edge[0],", "0 and len(output) < depth: temp = min(List, key = lambda g: g.graph['cost'])", "G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth == -1: while len(MST) !=", "[node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt =", "edges result =[] #This will store the resultant MST i = 0 #", "= sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for", "1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step 1: Sort all the edges", "change the # given graph, we can create a copy of graph if", "same, then make one as root # and increment its rank by one", "and len(output) < depth: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST'])", "node) ]['weight'],reverse=True) cnt = 0 for i in range(len(leaves)): if cnt < 3:", "depth: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp,", "if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray += [G] if", "v)) e += 1 elif G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000", "1 elif G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else:", "average_pairwise_distance_fast(T) leaves = [] P = T.copy() for node in T.nodes: if T.degree[node]", "= sys.argv[1] f = open(path, 'r') lines = f.readlines() task = [] for", "multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1, 304)) med_index =", 
"def find(G, i): if G.nodes[i]['parent'] == i: return i return find(G, G.nodes[i]['parent']) #", "= MSTP1 #check if P1 is connected P3 = P1.copy() for u, v", "output: P = nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] =", "be taken is equal to V-1 while e < G.number_of_nodes() - 1: #", "u) y = find(G ,v) union(G, x, y) result.append((u, v)) e += 1", "#print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves =", "Step 1: Sort all the edges in non-decreasing # order of their #", "= T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: P", "y) # Attach smaller rank tree under root of # high rank tree", "P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1", "v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 =", "pool = multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1, 304)) med_index = list(range(304,", "index # of result for next edge if x != y: e =", "python3 solver.py def solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G =", "iteration if i > len(sortedges) - 1: return [] u,v = sortedges[i] i", "= node G.nodes[node]['rank'] = 0 for u, v in G.edges: if G.edges[u, v]['property']", "= multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1, 304)) med_index", "else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output = [] outgraphs", "newcost = average_pairwise_distance_fast(G) if newcost < oldcost: P = deletenode(G,O) return P return", "sortedges[i] i = i + 1 x = find(G, u) y = find(G", "G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < 
oldcost: GraphArray += [G]", "v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) #", "y) result.append((u, v)) e += 1 elif G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight']", "in lines: (l, r) = line.split() (n, i) = l.split('-') i = int(i)", "- start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T)", "= 'normal' List = {G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth", "i return find(G, G.nodes[i]['parent']) # Here's an example of how to run your", "in ST.edges: weight += ST.edges[edge]['weight'] for ST in STs: if i < depth:", "is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray += [G] if len(GraphArray)", "task.append((n, i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading, task) if __name__ ==", "next edge if x != y: e = e + 1 result.append((u,v)) union(G,", "= cost P.graph['MST'] = result return result def union(G, x, y): xroot =", "discard the edge for i,j in result: cost += P.edges[i, j]['weight'] P.graph['cost'] =", "in result: cost += P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result return", "smaller rank tree under root of # high rank tree (Union by Rank)", "yroot = find(G, y) # Attach smaller rank tree under root of #", "len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G,", "Partition(temp, List, MST) else: while len(MST) != 0 and len(output) < depth: temp", "#This will store the resultant MST i = 0 # An index variable,", "edges e = 0 # An index variable, used for result[] for node", "G.edges: if G.edges[u, v]['property'] == 'included': x = find(G, u) y = find(G", "next iteration if i > len(sortedges) - 1: return [] u,v = sortedges[i]", 
"example of how to run your solver. # Usage: python3 solver.py def solver_multi_threading(i,", "= large_index pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1] f = open(path, 'r')", "< 2: return deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3],", "elif tt == 'medium': task = med_index elif tt == 'large': task =", "is connected P3 = P1.copy() for u, v in P1.edges: if P1.edges[u, v]['property']", "= result return result def union(G, x, y): xroot = find(G, x) yroot", "union(G, x, y): xroot = find(G, x) yroot = find(G, y) # Attach", "e = 0 # An index variable, used for result[] for node in", "index variable, used for sorted edges e = 0 # An index variable,", "small_index elif tt == 'small': task = small_index elif tt == 'medium': task", "Pick the smallest edge and increment # the index for next iteration if", "networkx as nx from parse import read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance,", "< 3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost =", "P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1", "# given graph, we can create a copy of graph if len(normal_edges) ==", "graph if len(normal_edges) == 0: return result sortedges = sorted( normal_edges, key=lambda edge:", "in result and increment the index # of result for next edge if", "for next edge if x != y: e = e + 1 result.append((u,v))", "ranks are same, then make one as root # and increment its rank", "P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G = P.copy()", "increment its rank by one else : G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1", "= 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step 1: Sort all the", "edge and increment # the index for next iteration if i > 
len(sortedges)", "are not allowed to change the # given graph, we can create a", "= sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = [] small_index =", "u, v in G.edges: if G.edges[u, v]['property'] == 'included': x = find(G, u)", "= 0 normal_edges = [] #store all the normal edges result =[] #This", "equal to V-1 while e < G.number_of_nodes() - 1: # Step 2: Pick", "of result for next edge if x != y: e = e +", "= \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G, depth)", "sys import time import multiprocessing def solve(G, depth): \"\"\" Args: G: networkx.Graph Returns:", "len(sortedges) - 1: return [] u,v = sortedges[i] i = i + 1", "0: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp,", "key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0]) return", "(list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G):", "large_index pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1] f = open(path, 'r') lines", "= list(range(304, 607)) large_index = list(range(607, 1007)) if tt == \"all\": task =", "\"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G, depth) #print(\"Average", "in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return", "[node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i", "of edges to be taken is equal to V-1 while e < G.number_of_nodes()", "- 1: # Step 2: Pick the smallest edge and increment # the", "= i + 1 x = find(G, u) y = find(G ,v) #", "normal 
edges result =[] #This will store the resultant MST i = 0", "solver. # Usage: python3 solver.py def solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0],", "sys.argv[1] f = open(path, 'r') lines = f.readlines() task = [] for line", "else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray:", "large_index + med_index + small_index elif tt == 'small': task = small_index elif", "for line in lines: (l, r) = line.split() (n, i) = l.split('-') i", "cost P.graph['MST'] = result return result def union(G, x, y): xroot = find(G,", "304)) med_index = list(range(304, 607)) large_index = list(range(607, 1007)) if tt == \"all\":", "P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 =", "= xroot G.nodes[xroot]['rank'] += 1 def find(G, i): if G.nodes[i]['parent'] == i: return", "average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray += [G] if len(GraphArray) < 2: return", "and increment the index # of result for next edge if x !=", "elif tt == 'large': task = large_index pool.map(solver_multi_threading, task) def p_main(): path =", "< oldcost: P = deletenode(G,O) return P return P def starter(T,O): GraphArray =", "T = solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]),", "newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray =", "MST): P1 = nx.Graph() P2 = nx.Graph() P1 = P.copy() P2 = P.copy()", "T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray +=", "import multiprocessing def solve(G, depth): \"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\"", "pairwise 
distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt =", "tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return", "len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P):", "distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt = sys.argv[1]", "'r') lines = f.readlines() task = [] for line in lines: (l, r)", "T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for i in range(len(leaves)): if cnt", "list(range(304, 607)) large_index = list(range(607, 1007)) if tt == \"all\": task = large_index", "print(n,i,r) if(int(r) > 10): task.append((n, i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading,", "run your solver. 
# Usage: python3 solver.py def solver_multi_threading(i, depth = 1000): path", "multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1, 304)) med_index = list(range(304, 607)) large_index", "P.graph['MST'] = result return result def union(G, x, y): xroot = find(G, x)", "oldcost: newGraphArray += [G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray)", "[starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() -", "if cnt < 3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1", "elif G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u,", "1: return [] u,v = sortedges[i] i = i + 1 x =", "v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step 1: Sort all", "temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List,", "task) def p_main(): path = sys.argv[1] f = open(path, 'r') lines = f.readlines()", "result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total time", "lines: (l, r) = line.split() (n, i) = l.split('-') i = int(i) print(n,i,r)", "cnt < 3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost", "Step 2: Pick the smallest edge and increment # the index for next", "cost += P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result return result def", "y): xroot = find(G, x) yroot = find(G, y) # Attach smaller rank", "G = P.copy() cost = 0 normal_edges = [] #store all the normal", "for u, v in P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if", "success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise distance: 
{}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0],", "in STs: weight = 0 for edge in ST.edges: weight += ST.edges[edge]['weight'] for", "as nx from parse import read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast", "deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = [] P = T.copy() for node in", "newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = []", "edges in non-decreasing # order of their # weight. If we are not", "elements # Number of edges to be taken is equal to V-1 while", "= T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost = average_pairwise_distance_fast(G) if newcost", "= find(G, y) # Attach smaller rank tree under root of # high", "= [] STs = genST(G, depth) print(\"STs gen!\") i = 0 for ST", "3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost = average_pairwise_distance_fast(G)", "cnt = 0 for i in range(len(leaves)): if cnt < 3: G =", "MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1 is connected P3 =", "List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G = P.copy() cost = 0", "lines = f.readlines() task = [] for line in lines: (l, r) =", "> G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks are same, then make one", "G: average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0]", "find(G, x) yroot = find(G, y) # Attach smaller rank tree under root", "print(\"STs gen!\") i = 0 for ST in STs: weight = 0 for", "leaves = [] for node in T.nodes: if T.degree[node] == 1: leaves +=", "node G.nodes[node]['rank'] = 0 for u, v in G.edges: if G.edges[u, v]['property'] ==", "depth) #print(\"Average pairwise distance: 
{}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main():", "v in P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1)))", "else: normal_edges.append((u, v)) # Step 1: Sort all the edges in non-decreasing #", "Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time import multiprocessing def solve(G,", "if depth == -1: while len(MST) != 0: temp = min(List, key =", "Partition(temp, List, MST) for edges in output: P = nx.Graph() for edge in", "v in G.edges: if G.edges[u, v]['property'] == 'included': x = find(G, u) y", "newGraphArray += [G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) ==", "line in lines: (l, r) = line.split() (n, i) = l.split('-') i =", "average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def", "return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost", "if G.edges[u, v]['property'] == 'included': x = find(G, u) y = find(G ,v)", "# Here's an example of how to run your solver. 
# Usage: python3", "delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in", "for edges in output: P = nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1])", "return outgraphs def Partition(P, List, MST): P1 = nx.Graph() P2 = nx.Graph() P1", "i += 1 result += [starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda G:", "if len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else:", "P1 = P.copy() P2 = P.copy() for u, v in P.graph['MST']: if P.edges[u,", "pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1] f = open(path, 'r') lines =", "= [] for node in T.nodes: if T.degree[node] == 1: leaves += [node]", "P1 is connected P3 = P1.copy() for u, v in P1.edges: if P1.edges[u,", "= 0 # An index variable, used for sorted edges e = 0", "average_pairwise_distance, average_pairwise_distance_fast import sys import time import multiprocessing def solve(G, depth): \"\"\" Args:", "find(G, u) y = find(G ,v) # If including this edge does't cause", "y = find(G ,v) # If including this edge does't cause cycle, #", "multiprocessing def solve(G, depth): \"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\" result", "g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST) != 0", "[] u,v = sortedges[i] i = i + 1 x = find(G, u)", "# Else discard the edge for i,j in result: cost += P.edges[i, j]['weight']", "def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves", "y: e = e + 1 result.append((u,v)) union(G, x, y) # Else discard", "u, v in P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded'", "med_index = list(range(304, 607)) large_index = list(range(607, 1007)) if tt == 
\"all\": task", "newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for", "Returns: T: networkx.Graph \"\"\" result = [] STs = genST(G, depth) print(\"STs gen!\")", "return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost =", "== 1: leaves += [node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0],", "edge for i,j in result: cost += P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST']", "depth) print(\"STs gen!\") i = 0 for ST in STs: weight = 0", "create a copy of graph if len(normal_edges) == 0: return result sortedges =", "v]['property'] == 'included': x = find(G, u) y = find(G ,v) union(G, x,", "the edge for i,j in result: cost += P.edges[i, j]['weight'] P.graph['cost'] = cost", "for ST in STs: weight = 0 for edge in ST.edges: weight +=", "main(): tt = sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = []", "'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST']", "x = find(G, u) y = find(G ,v) union(G, x, y) result.append((u, v))", "j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result return result def union(G, x, y):", "< G.number_of_nodes() - 1: # Step 2: Pick the smallest edge and increment", "for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = [] for node in", "# high rank tree (Union by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] =", "G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: P = deletenode(G,O)", "= P2.copy() def KruskalMST(P): G = P.copy() cost = 0 normal_edges = []", "# Attach smaller rank tree under root of # high rank tree (Union", "rank tree under root of # high rank tree (Union by 
Rank) if", "lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST) !=", "read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output", "outgraphs = [] for u, v in G.edges: G.edges[u, v]['property'] = 'normal' List", "for u, v in P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] =", "depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path))", "med_index elif tt == 'large': task = large_index pool.map(solver_multi_threading, task) def p_main(): path", "\"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\" result = [] STs =", "the smallest edge and increment # the index for next iteration if i", "in non-decreasing # order of their # weight. If we are not allowed", "G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks are same, then make", "# of result for next edge if x != y: e = e", "how to run your solver. # Usage: python3 solver.py def solver_multi_threading(i, depth =", "G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot", "== P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G =", "in G.edges: G.edges[u, v]['property'] = 'normal' List = {G} G.graph['MST'] = KruskalMST(G) MST", "return P return P def starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves", "weight. 
If we are not allowed to change the # given graph, we", "if is_valid_network(O,G): cnt += 1 newcost = average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray", "1 result += [starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t", "key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for i in range(len(leaves)):", "= sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create V subsets with single elements", "result sortedges = sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create V subsets with", "G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks are same, then make one as", "P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1:", "return i return find(G, G.nodes[i]['parent']) # Here's an example of how to run", "to be taken is equal to V-1 while e < G.number_of_nodes() - 1:", "if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def", "= KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1 is connected P3 = P1.copy()", "0 # An index variable, used for result[] for node in G.nodes: G.nodes[node]['parent']", "T.copy() for node in T.nodes: if T.degree[node] == 1: leaves += [node] leaves", "networkx.Graph \"\"\" result = [] STs = genST(G, depth) print(\"STs gen!\") i =", "= time.time() - start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost", "key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)): G = T.copy()", "lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in output:", "= P.copy() for u, v in P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u,", 
"cnt += 1 newcost = average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray += [G]", "non-decreasing # order of their # weight. If we are not allowed to", "if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included'", "607)) large_index = list(range(607, 1007)) if tt == \"all\": task = large_index +", "in T.nodes: if T.degree[node] == 1: leaves += [node] leaves = sorted( leaves,", "= lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in", "e < G.number_of_nodes() - 1: # Step 2: Pick the smallest edge and", "i: return i return find(G, G.nodes[i]['parent']) # Here's an example of how to", "def starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves = [] for node", "Attach smaller rank tree under root of # high rank tree (Union by", "one else : G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def find(G, i): if", "+= [node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for", "root # and increment its rank by one else : G.nodes[yroot]['parent'] = xroot", "+ small_index elif tt == 'small': task = small_index elif tt == 'medium':", "find(G, i): if G.nodes[i]['parent'] == i: return i return find(G, G.nodes[i]['parent']) # Here's", "G = read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise distance:", "\"\"\" result = [] STs = genST(G, depth) print(\"STs gen!\") i = 0", "min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else:", "depth == -1: while len(MST) != 0: temp = min(List, key = lambda", "newcost = average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray += [G] newGraphArray = sorted(", "the edges in non-decreasing # order of 
their # weight. If we are", "taken is equal to V-1 while e < G.number_of_nodes() - 1: # Step", "genST(G, depth) print(\"STs gen!\") i = 0 for ST in STs: weight =", "i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if", "= min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST)", "g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in output: P =", "G.nodes[i]['parent'] == i: return i return find(G, G.nodes[i]['parent']) # Here's an example of", "u,v = sortedges[i] i = i + 1 x = find(G, u) y", "P.copy() cost = 0 normal_edges = [] #store all the normal edges result", "P.copy() for u, v in P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property']", "[G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return", "leaves += [node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True)", "allowed to change the # given graph, we can create a copy of", "edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return outgraphs", "O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T)", "return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output =", "in output: P = nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight']", "if len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def", "# the index for next iteration if i > len(sortedges) 
- 1: return", "GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes:", "T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt += 1 newcost = average_pairwise_distance_fast(G) if newcost <", "graph, we can create a copy of graph if len(normal_edges) == 0: return", "= [] outgraphs = [] for u, v in G.edges: G.edges[u, v]['property'] =", "if len(normal_edges) == 0: return result sortedges = sorted( normal_edges, key=lambda edge: G.edges[edge]['weight'])", "# order of their # weight. If we are not allowed to change", "task = [] for line in lines: (l, r) = line.split() (n, i)", "elif tt == 'small': task = small_index elif tt == 'medium': task =", "yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks are same,", "for i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G)", "import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time import multiprocessing def solve(G, depth):", "return P def starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T) leaves = []", "to V-1 while e < G.number_of_nodes() - 1: # Step 2: Pick the", "weight += ST.edges[edge]['weight'] for ST in STs: if i < depth: i +=", "{} success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path))", "result += [starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t =", "= sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)):", "leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)): G =", "tt == 'small': task = small_index elif tt == 'medium': task = med_index", "== -1: while len(MST) != 
0: temp = min(List, key = lambda g:", "then make one as root # and increment its rank by one else", "tt = sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = [] small_index", "2: Pick the smallest edge and increment # the index for next iteration", "write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time import multiprocessing", "1 def find(G, i): if G.nodes[i]['parent'] == i: return i return find(G, G.nodes[i]['parent'])", "newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return newGraphArray[0]", "def KruskalMST(P): G = P.copy() cost = 0 normal_edges = [] #store all", "we are not allowed to change the # given graph, we can create", "T: networkx.Graph \"\"\" result = [] STs = genST(G, depth) print(\"STs gen!\") i", "by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']:", "tt == 'medium': task = med_index elif tt == 'large': task = large_index", "y = find(G ,v) union(G, x, y) result.append((u, v)) e += 1 elif", "small_index elif tt == 'medium': task = med_index elif tt == 'large': task", "it in result and increment the index # of result for next edge", "= P.copy() cost = 0 normal_edges = [] #store all the normal edges", "edge if x != y: e = e + 1 result.append((u,v)) union(G, x,", "v in P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u,", "newcost = average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray += [G] if len(GraphArray) <", "T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i]) if", "= GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = [] for", "== 2: return newGraphArray[0] else: 
newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray", "= 0 for ST in STs: weight = 0 for edge in ST.edges:", "[P] return outgraphs def Partition(P, List, MST): P1 = nx.Graph() P2 = nx.Graph()", "in G.edges: if G.edges[u, v]['property'] == 'included': x = find(G, u) y =", "newcost < oldcost: GraphArray += [G] if len(GraphArray) < 2: return deletenode(T,O) elif", "G.nodes[node]['rank'] = 0 for u, v in G.edges: if G.edges[u, v]['property'] == 'included':", "e + 1 result.append((u,v)) union(G, x, y) # Else discard the edge for", "Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\" result = [] STs = genST(G,", "edges in output: P = nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0],", "for i in range(len(leaves)): if cnt < 3: G = T.copy() G.remove_node(leaves[i]) if", "P = deletenode(G,O) return P return P def starter(T,O): GraphArray = [] oldcost", "[G] if len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O)", "of how to run your solver. 
# Usage: python3 solver.py def solver_multi_threading(i, depth", "= average_pairwise_distance_fast(T) leaves = [] P = T.copy() for node in T.nodes: if", "f = open(path, 'r') lines = f.readlines() task = [] for line in", "T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: P =", "+= [G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3:", "= 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check", "make one as root # and increment its rank by one else :", "oldcost: P = deletenode(G,O) return P return P def starter(T,O): GraphArray = []", "output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST) != 0 and len(output)", "increment # the index for next iteration if i > len(sortedges) - 1:", ",v) # If including this edge does't cause cycle, # include it in", "!= y: e = e + 1 result.append((u,v)) union(G, x, y) # Else", "if P1 is connected P3 = P1.copy() for u, v in P1.edges: if", "task = large_index + med_index + small_index elif tt == 'small': task =", "u) y = find(G ,v) # If including this edge does't cause cycle,", "< depth: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST']))", "[] oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if T.degree[node]", "== 0: return result sortedges = sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create", "Else discard the edge for i,j in result: cost += P.edges[i, j]['weight'] P.graph['cost']", "def solve(G, depth): \"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\" result =", "sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1,", "for result[] 
for node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for", "# and increment its rank by one else : G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank']", "{G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth == -1: while len(MST)", "single elements # Number of edges to be taken is equal to V-1", "list(range(607, 1007)) if tt == \"all\": task = large_index + med_index + small_index", "task = med_index elif tt == 'large': task = large_index pool.map(solver_multi_threading, task) def", "= T.copy() for node in T.nodes: if T.degree[node] == 1: leaves += [node]", "increment the index # of result for next edge if x != y:", "time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = []", "if len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def", "else : G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def find(G, i): if G.nodes[i]['parent']", "Usage: python3 solver.py def solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G", "tree (Union by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank']", "deletenode(G,O) return P return P def starter(T,O): GraphArray = [] oldcost = average_pairwise_distance_fast(T)", "= P1.copy() for u, v in P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u,", "include it in result and increment the index # of result for next", "weight = 0 for edge in ST.edges: weight += ST.edges[edge]['weight'] for ST in", "1 newcost = average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray += [G] newGraphArray =", "nx from parse import read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import", "= 0 for edge in ST.edges: weight += 
ST.edges[edge]['weight'] for ST in STs:", "cause cycle, # include it in result and increment the index # of", "is equal to V-1 while e < G.number_of_nodes() - 1: # Step 2:", "# An index variable, used for result[] for node in G.nodes: G.nodes[node]['parent'] =", "# If ranks are same, then make one as root # and increment", "newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0]", "MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges in output: P = nx.Graph() for edge", "p_main(): path = sys.argv[1] f = open(path, 'r') lines = f.readlines() task =", "P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return outgraphs def Partition(P, List,", "nx.Graph() P1 = P.copy() P2 = P.copy() for u, v in P.graph['MST']: if", "+ 1 x = find(G, u) y = find(G ,v) # If including", "newcost < oldcost: newGraphArray += [G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree))", "'normal' List = {G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth ==", "> len(sortedges) - 1: return [] u,v = sortedges[i] i = i +", "union(G, x, y) result.append((u, v)) e += 1 elif G.edges[u, v]['property'] == 'excluded':", "GraphArray += [G] if len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray) == 2:", "its rank by one else : G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def", "def solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input", "P = nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0],", "= T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: GraphArray", "average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray += [G] newGraphArray = sorted( 
newGraphArray, key=lambda", "ST in STs: if i < depth: i += 1 result += [starter(ST,G)]", "def genST(G, depth): output = [] outgraphs = [] for u, v in", "print(\"Input {} success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {}", "2: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray =", "'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step 1:", "networkx.Graph Returns: T: networkx.Graph \"\"\" result = [] STs = genST(G, depth) print(\"STs", "solve(G, depth): \"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\" result = []", "start_time #print(\"total time takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves", "node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)): G = T.copy() G.remove_node(leaves[i])", "== 'included': x = find(G, u) y = find(G ,v) union(G, x, y)", "delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves =", "will store the resultant MST i = 0 # An index variable, used", "path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G,", "+ 1 result.append((u,v)) union(G, x, y) # Else discard the edge for i,j", "'large': task = large_index pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1] f =", "= 0 # An index variable, used for result[] for node in G.nodes:", "normal_edges.append((u, v)) # Step 1: Sort all the edges in non-decreasing # order", "i = int(i) print(n,i,r) if(int(r) > 10): task.append((n, i)) cores = multiprocessing.cpu_count() pool", "the # given graph, we can create a copy of graph if len(normal_edges)", "= 
list(range(1, 304)) med_index = list(range(304, 607)) large_index = list(range(607, 1007)) if tt", "depth): output = [] outgraphs = [] for u, v in G.edges: G.edges[u,", "len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O):", "task = [] small_index = list(range(1, 304)) med_index = list(range(304, 607)) large_index =", "solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {}", "write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt = sys.argv[1] cores = multiprocessing.cpu_count() pool =", "if T.degree[node] == 1: leaves += [node] leaves = sorted( leaves, key=lambda node:", "G.edges[u, v]['property'] == 'included': x = find(G, u) y = find(G ,v) union(G,", "'medium': task = med_index elif tt == 'large': task = large_index pool.map(solver_multi_threading, task)", "V subsets with single elements # Number of edges to be taken is", "y) # Else discard the edge for i,j in result: cost += P.edges[i,", "return result def union(G, x, y): xroot = find(G, x) yroot = find(G,", "result return result def union(G, x, y): xroot = find(G, x) yroot =", "G: networkx.Graph Returns: T: networkx.Graph \"\"\" result = [] STs = genST(G, depth)", "P.copy() P2 = P.copy() for u, v in P.graph['MST']: if P.edges[u, v]['property'] ==", "the index # of result for next edge if x != y: e", "depth: i += 1 result += [starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda", "def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves", "range(len(leaves)): if cnt < 3: G = T.copy() G.remove_node(leaves[i]) if is_valid_network(O,G): cnt +=", "P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] =", "if G.nodes[i]['parent'] == i: return i return 
find(G, G.nodes[i]['parent']) # Here's an example", "in P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property']", "'included': x = find(G, u) y = find(G ,v) union(G, x, y) result.append((u,", "the normal edges result =[] #This will store the resultant MST i =", "the resultant MST i = 0 # An index variable, used for sorted", "for u, v in G.edges: if G.edges[u, v]['property'] == 'included': x = find(G,", "0 # An index variable, used for sorted edges e = 0 #", "# weight. If we are not allowed to change the # given graph,", "v in G.edges: G.edges[u, v]['property'] = 'normal' List = {G} G.graph['MST'] = KruskalMST(G)", "# Step 2: Pick the smallest edge and increment # the index for", "= find(G, x) yroot = find(G, y) # Attach smaller rank tree under", "by one else : G.nodes[yroot]['parent'] = xroot G.nodes[xroot]['rank'] += 1 def find(G, i):", "=[] #This will store the resultant MST i = 0 # An index", "+= 1 newcost = average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray += [G] newGraphArray", "= line.split() (n, i) = l.split('-') i = int(i) print(n,i,r) if(int(r) > 10):", "deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O):", "== 'medium': task = med_index elif tt == 'large': task = large_index pool.map(solver_multi_threading,", "G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks", "i > len(sortedges) - 1: return [] u,v = sortedges[i] i = i", "1 x = find(G, u) y = find(G ,v) # If including this", "return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy()", "if newcost < oldcost: GraphArray += [G] if len(GraphArray) < 2: return deletenode(T,O)", "return result sortedges = sorted( normal_edges, key=lambda 
edge: G.edges[edge]['weight']) # Create V subsets", "variable, used for result[] for node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] =", "+= [starter(ST,G)] print(\"MDT gen!\") result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time()", "If we are not allowed to change the # given graph, we can", "if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] =", "+= 1 def find(G, i): if G.nodes[i]['parent'] == i: return i return find(G,", "variable, used for sorted edges e = 0 # An index variable, used", "from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time import multiprocessing def", "P1 = nx.Graph() P2 = nx.Graph() P1 = P.copy() P2 = P.copy() for", "= KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth == -1: while len(MST) != 0:", "leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for i in", "3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output", "result =[] #This will store the resultant MST i = 0 # An", "does't cause cycle, # include it in result and increment the index #", "len(GraphArray) < 2: return deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return", "< G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot #", "(n, i) = l.split('-') i = int(i) print(n,i,r) if(int(r) > 10): task.append((n, i))", "#print(\"Average pairwise distance: {}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt", "small_index = list(range(1, 304)) med_index = list(range(304, 607)) large_index = 
list(range(607, 1007)) if", "# An index variable, used for sorted edges e = 0 # An", "An index variable, used for sorted edges e = 0 # An index", "i = i + 1 x = find(G, u) y = find(G ,v)", "if tt == \"all\": task = large_index + med_index + small_index elif tt", "# Create V subsets with single elements # Number of edges to be", "# Usage: python3 solver.py def solver_multi_threading(i, depth = 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1])", "List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST) != 0 and len(output) <", "int(i) print(n,i,r) if(int(r) > 10): task.append((n, i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores)", "v)) else: normal_edges.append((u, v)) # Step 1: Sort all the edges in non-decreasing", "P = T.copy() for node in T.nodes: if T.degree[node] == 1: leaves +=", "result for next edge if x != y: e = e + 1", "if is_valid_network(O,G): newcost = average_pairwise_distance_fast(G) if newcost < oldcost: P = deletenode(G,O) return", "result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = [] P = T.copy() for", "G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return outgraphs def Partition(P, List, MST): P1 =", "= sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0] else:", "result: cost += P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result return result", "return find(G, G.nodes[i]['parent']) # Here's an example of how to run your solver.", "depth): \"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph \"\"\" result = [] STs", "cost = 0 normal_edges = [] #store all the normal edges result =[]", "= large_index + med_index + small_index elif tt == 'small': task = small_index", "in P1.edges: if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) ==", "root of # high rank tree (Union by 
Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']:", "[] P = T.copy() for node in T.nodes: if T.degree[node] == 1: leaves", "task = small_index elif tt == 'medium': task = med_index elif tt ==", ",v) union(G, x, y) result.append((u, v)) e += 1 elif G.edges[u, v]['property'] ==", "index for next iteration if i > len(sortedges) - 1: return [] u,v", "2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for", "G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for u, v in G.edges: if G.edges[u,", "else: while len(MST) != 0 and len(output) < depth: temp = min(List, key", "len(MST) != 0: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp)", "normal_edges = [] #store all the normal edges result =[] #This will store", "key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3]", "nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs", "elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray", "== 3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth):", "len(normal_edges) == 0: return result sortedges = sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) #", "G.nodes[i]['parent']) # Here's an example of how to run your solver. 
# Usage:", "MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) else: while len(MST) != 0 and len(output) < depth:", "-1: while len(MST) != 0: temp = min(List, key = lambda g: g.graph['cost'])", "result.append((u, v)) e += 1 elif G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight'] =", "STs: weight = 0 for edge in ST.edges: weight += ST.edges[edge]['weight'] for ST", "= nx.Graph() P2 = nx.Graph() P1 = P.copy() P2 = P.copy() for u,", "G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v))", "result = [] STs = genST(G, depth) print(\"STs gen!\") i = 0 for", "# include it in result and increment the index # of result for", "of their # weight. If we are not allowed to change the #", "normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create V subsets with single elements # Number", "xroot = find(G, x) yroot = find(G, y) # Attach smaller rank tree", "= 1000): path = \"inputs/{}-{}.in\".format(i[0], i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path)) T", "= P.copy() P2 = P.copy() for u, v in P.graph['MST']: if P.edges[u, v]['property']", "{}\".format(average_pairwise_distance_fast(T))) print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt = sys.argv[1] cores", "leaves = [] P = T.copy() for node in T.nodes: if T.degree[node] ==", "P.graph['cost'] = cost P.graph['MST'] = result return result def union(G, x, y): xroot", "l.split('-') i = int(i) print(n,i,r) if(int(r) > 10): task.append((n, i)) cores = multiprocessing.cpu_count()", "len(MST) != 0 and len(output) < depth: temp = min(List, key = lambda", "sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create V subsets with single elements #", "= yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks are", "= G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] 
return outgraphs def Partition(P, List, MST): P1", "in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if", "Sort all the edges in non-decreasing # order of their # weight. If", "len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray =", "v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1)", "find(G, u) y = find(G ,v) union(G, x, y) result.append((u, v)) e +=", "1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G = P.copy() cost =", "2: return deletenode(T,O) elif len(GraphArray) == 2: return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O)", "if P1.edges[u, v]['property'] == 'excluded': P3.remove_edge(u, v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() -", "for ST in STs: if i < depth: i += 1 result +=", "Number of edges to be taken is equal to V-1 while e <", "elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent'] = xroot # If ranks are same, then", "output = [] outgraphs = [] for u, v in G.edges: G.edges[u, v]['property']", "used for result[] for node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0", "sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for i", "gen!\") result = sorted(result, key=lambda G: average_pairwise_distance_fast(G)) #t = time.time() - start_time #print(\"total", "= newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output = [] outgraphs = []", "> 10): task.append((n, i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading, task) if", "not allowed to change the # given graph, we can create a copy", "node in T.nodes: if T.degree[node] == 1: leaves += [node] leaves = sorted(", "= xroot # If 
ranks are same, then make one as root #", "return [] u,v = sortedges[i] i = i + 1 x = find(G,", "sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) for i in range(len(leaves)): G", "your solver. # Usage: python3 solver.py def solver_multi_threading(i, depth = 1000): path =", "to run your solver. # Usage: python3 solver.py def solver_multi_threading(i, depth = 1000):", "large_index = list(range(607, 1007)) if tt == \"all\": task = large_index + med_index", "[] STs = genST(G, depth) print(\"STs gen!\") i = 0 for ST in", "find(G, y) # Attach smaller rank tree under root of # high rank", "G.number_of_nodes() - 1: # Step 2: Pick the smallest edge and increment #", "edges to be taken is equal to V-1 while e < G.number_of_nodes() -", "v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1", "newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def genST(G, depth): output = [] outgraphs =", "time import multiprocessing def solve(G, depth): \"\"\" Args: G: networkx.Graph Returns: T: networkx.Graph", "!= 0 and len(output) < depth: temp = min(List, key = lambda g:", "for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs +=", "0 for u, v in G.edges: if G.edges[u, v]['property'] == 'included': x =", "G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v)) else: normal_edges.append((u, v)) # Step 1: Sort", "subsets with single elements # Number of edges to be taken is equal", "result def union(G, x, y): xroot = find(G, x) yroot = find(G, y)", "including this edge does't cause cycle, # include it in result and increment", "i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading, task) if __name__ == \"__main__\":", "x, y) result.append((u, v)) e += 1 elif G.edges[u, v]['property'] == 'excluded': G.edges[u,", "newGraphArray, 
key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray =", "= average_pairwise_distance_fast(G) if newcost < oldcost: newGraphArray += [G] newGraphArray = sorted( newGraphArray,", "success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt = sys.argv[1] cores = multiprocessing.cpu_count() pool", "< oldcost: newGraphArray += [G] newGraphArray = sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if", "and increment # the index for next iteration if i > len(sortedges) -", "average_pairwise_distance_fast import sys import time import multiprocessing def solve(G, depth): \"\"\" Args: G:", "we can create a copy of graph if len(normal_edges) == 0: return result", "- 1: return [] u,v = sortedges[i] i = i + 1 x", "for next iteration if i > len(sortedges) - 1: return [] u,v =", "= {G} G.graph['MST'] = KruskalMST(G) MST = {tuple(G.graph['MST'])} if depth == -1: while", "delete3node(newGraphArray,O) def genST(G, depth): output = [] outgraphs = [] for u, v", "under root of # high rank tree (Union by Rank) if G.nodes[xroot]['rank'] <", "v) if len(list(nx.dfs_edges(P3, source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy()", "+ med_index + small_index elif tt == 'small': task = small_index elif tt", "delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves =", "'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if", "v]['property'] = 'included' MSTP1 = KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1 is", "GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T) leaves = [] for node", "i = 0 # An index variable, used for sorted edges e =", "edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], 
edge[1]]['weight'] outgraphs += [P]", "= open(path, 'r') lines = f.readlines() task = [] for line in lines:", "newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in", "for node in T.nodes: if T.degree[node] == 1: leaves += [node] leaves =", "def Partition(P, List, MST): P1 = nx.Graph() P2 = nx.Graph() P1 = P.copy()", "# If including this edge does't cause cycle, # include it in result", "1: leaves += [node] leaves = sorted( leaves, key=lambda node: T.edges[ (list(T[node])[0], node)", "{} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt = sys.argv[1] cores = multiprocessing.cpu_count()", "== 'large': task = large_index pool.map(solver_multi_threading, task) def p_main(): path = sys.argv[1] f", "P.edges[i, j]['weight'] P.graph['cost'] = cost P.graph['MST'] = result return result def union(G, x,", "else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T", "if(int(r) > 10): task.append((n, i)) cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) pool.map(solver_multi_threading, task)", "store the resultant MST i = 0 # An index variable, used for", "i) = l.split('-') i = int(i) print(n,i,r) if(int(r) > 10): task.append((n, i)) cores", "this edge does't cause cycle, # include it in result and increment the", "parse import read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import", "sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray", "KruskalMST(P1) P1.graph['MST'] = MSTP1 #check if P1 is connected P3 = P1.copy() for", "= nx.Graph() P1 = P.copy() P2 = P.copy() for u, v in P.graph['MST']:", "KruskalMST(P): G = P.copy() cost = 0 
normal_edges = [] #store all the", "node in G.nodes: G.nodes[node]['parent'] = node G.nodes[node]['rank'] = 0 for u, v in", "+= 1 elif G.edges[u, v]['property'] == 'excluded': G.edges[u, v]['weight'] = 1000 normal_edges.append((u, v))", "source=1))) == P3.number_of_nodes() - 1: List.add(P1) MST.add(tuple(MSTP1)) P1 = P2.copy() def KruskalMST(P): G", "tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return", "'small': task = small_index elif tt == 'medium': task = med_index elif tt", "= nx.Graph() for edge in edges: P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight']", "Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] > G.nodes[yroot]['rank']: G.nodes[yroot]['parent']", "Partition(P, List, MST): P1 = nx.Graph() P2 = nx.Graph() P1 = P.copy() P2", "T.degree[node] == 1: leaves += [node] leaves = sorted( leaves, key=lambda node: T.edges[", "genST(G, depth): output = [] outgraphs = [] for u, v in G.edges:", "(Union by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent'] = yroot elif G.nodes[xroot]['rank'] >", "== 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] = 'included' MSTP1 = KruskalMST(P1)", "if newcost < oldcost: P = deletenode(G,O) return P return P def starter(T,O):", "cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores) task = [] small_index = list(range(1, 304))", "Here's an example of how to run your solver. 
# Usage: python3 solver.py", "find(G ,v) union(G, x, y) result.append((u, v)) e += 1 elif G.edges[u, v]['property']", "[] outgraphs = [] for u, v in G.edges: G.edges[u, v]['property'] = 'normal'", "!= 0: temp = min(List, key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST']))", "x, y): xroot = find(G, x) yroot = find(G, y) # Attach smaller", "oldcost = average_pairwise_distance_fast(T) leaves = [] P = T.copy() for node in T.nodes:", "takes:%d\"%t) #print(result[0]) return result[0] def deletenode(T,O): oldcost = average_pairwise_distance_fast(T) leaves = [] P", "key = lambda g: g.graph['cost']) output.append(temp.graph['MST']) List.remove(temp) MST.remove(tuple(temp.graph['MST'])) Partition(temp, List, MST) for edges", "i + 1 x = find(G, u) y = find(G ,v) # If", "key=lambda edge: G.edges[edge]['weight']) # Create V subsets with single elements # Number of", "0: return result sortedges = sorted( normal_edges, key=lambda edge: G.edges[edge]['weight']) # Create V", "i[1]) G = read_input_file(path) print(\"Input {} success!\".format(path)) T = solve(G, depth) #print(\"Average pairwise", "= sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return newGraphArray[0] else:", "sorted( newGraphArray, key=lambda tree: average_pairwise_distance_fast(tree)) if len(newGraphArray) == 3: return newGraphArray[0] else: newGraphArray", "read_input_file, write_output_file from Utility import is_valid_network, average_pairwise_distance, average_pairwise_distance_fast import sys import time import", "oldcost = average_pairwise_distance_fast(T) leaves = [] for node in T.nodes: if T.degree[node] ==", "List, MST): P1 = nx.Graph() P2 = nx.Graph() P1 = P.copy() P2 =", "P.add_edge(edge[0], edge[1]) P.edges[edge[0], edge[1]]['weight'] = G.edges[edge[0], edge[1]]['weight'] outgraphs += [P] return outgraphs def", "0 for edge in ST.edges: weight += ST.edges[edge]['weight'] for 
ST in STs: if", "tree under root of # high rank tree (Union by Rank) if G.nodes[xroot]['rank']", "average_pairwise_distance_fast(tree)) if len(newGraphArray) == 2: return newGraphArray[0] else: newGraphArray = newGraphArray[:3] return delete3node(newGraphArray,O)", "of # high rank tree (Union by Rank) if G.nodes[xroot]['rank'] < G.nodes[yroot]['rank']: G.nodes[xroot]['parent']", "= average_pairwise_distance_fast(G) if newcost < oldcost: P = deletenode(G,O) return P return P", "print(\"Output {} success!\".format(path)) write_output_file(\"outputs/{}-{}.out\".format(i[0], i[1]), T) def main(): tt = sys.argv[1] cores =", "List, MST) else: while len(MST) != 0 and len(output) < depth: temp =", "G.edges[edge]['weight']) # Create V subsets with single elements # Number of edges to", "delete3node(newGraphArray,O) def delete3node(GraphArray,O): newGraphArray = GraphArray.copy() for T in GraphArray: oldcost = average_pairwise_distance_fast(T)", "= find(G, u) y = find(G ,v) # If including this edge does't", "ST.edges: weight += ST.edges[edge]['weight'] for ST in STs: if i < depth: i", "P1.graph['MST'] = MSTP1 #check if P1 is connected P3 = P1.copy() for u,", "for edge in ST.edges: weight += ST.edges[edge]['weight'] for ST in STs: if i", "= 0 for u, v in G.edges: if G.edges[u, v]['property'] == 'included': x", "node: T.edges[ (list(T[node])[0], node) ]['weight'],reverse=True) cnt = 0 for i in range(len(leaves)): if", "P.graph['MST']: if P.edges[u, v]['property'] == 'normal': P1.edges[u, v]['property'] = 'excluded' P2.edges[u, v]['property'] =", "return delete3node_S(GraphArray,O) else: return delete3node(GraphArray[:3], O) def delete3node_S(GraphArray,O): newGraphArray = GraphArray.copy() for T", "= [] for u, v in G.edges: G.edges[u, v]['property'] = 'normal' List =", "if i < depth: i += 1 result += [starter(ST,G)] print(\"MDT gen!\") result", "i[1]), T) def main(): tt = sys.argv[1] cores = multiprocessing.cpu_count() pool = multiprocessing.Pool(processes=cores)", "cycle, # 
include it in result and increment the index # of result", "xroot G.nodes[xroot]['rank'] += 1 def find(G, i): if G.nodes[i]['parent'] == i: return i", "MST) else: while len(MST) != 0 and len(output) < depth: temp = min(List,", "one as root # and increment its rank by one else : G.nodes[yroot]['parent']" ]
[ "ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin self.rho", "\"\"\" #;+ #; NAME: #; cgm.core #; Version 1.0 #; #; PURPOSE: #;", "None: from astropy.cosmology import WMAP9 as cosmo if verbose is True: print('cgm.core: Using", "float, Quantity DEC for galaxy gal_z: float Galaxy redshift bg_ra: str, float, Quantity", "= CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem = bg_z", "background source Attributes ---------- rho: float Impact parameter (u.kpc) JXP on 29 Nov", "NAME: #; cgm.core #; Version 1.0 #; #; PURPOSE: #; Module for core", "from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs import radec as", "float Galaxy redshift bg_ra: str, float, Quantity RA for background source bg_dec: str,", "this is.\"\"\" return 'CGM' # ###################### ####################### # Testing if __name__ == '__main__':", "the type of vehicle this is.\"\"\" return 'CGM' # ###################### ####################### # Testing", "is True: print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z", "CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\" def __init__(self): #", "core routines of CGM analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from", "---------- rho: float Impact parameter (u.kpc) JXP on 29 Nov 2014 \"\"\" #", "{:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a string representing the type", "numpy as np from astropy import units as u from xastropy.igm.abs_sys.abssys_utils import 
AbslineSystem", "Class for a CGM system Combines absorption lines with a Galaxy Inputs: ----------", "float Redshift of background source Attributes ---------- rho: float Impact parameter (u.kpc) JXP", "absorption lines with a Galaxy Inputs: ---------- gal_ra: str, float, Quantity RA for", "self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin self.rho = ang_sep", "system Combines absorption lines with a Galaxy Inputs: ---------- gal_ra: str, float, Quantity", "for a CGM system Combines absorption lines with a Galaxy Inputs: ---------- gal_ra:", "# Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background", "RA for galaxy gal_dec: str, float, Quantity DEC for galaxy gal_z: float Galaxy", "Galaxy from xastropy.obs import radec as xra from xastropy.xutils import xdebug as xdb", "= None # Output def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True),", "string representing the type of vehicle this is.\"\"\" return 'CGM' # ###################### #######################", "print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle this is.\"\"\" return 'CGM'", "JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import, division, unicode_literals import numpy", "# Background source self.abs_sys.zem = bg_z # Calcualte rho if cosmo is None:", "from xastropy.xutils import xdebug as xdb from xastropy.xutils import arrays as xu_array #", "/ (1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing the type", "self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle", 
"CGMSys(object): \"\"\" Class for a CGM system Combines absorption lines with a Galaxy", "= ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord(", "if __name__ == '__main__': # Initialize tmp = CGMAbs() print(tmp) tmp2 = CGMAbsSurvey()", "Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\" def __init__(self): # Generate", "def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy", "arrays as xu_array # Path for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ##########################", "ang_sep * kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string", "this is.\"\"\" return 'CGM' # Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g},", "xdb from xastropy.xutils import arrays as xu_array # Path for xastropy #xa_path =", "def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self):", "with type AbslineSystem.__init__(self,'CGM') # Init self.ions = None # Output def __repr__(self): return", "is.\"\"\" return 'CGM' # Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format(", "from __future__ import print_function, absolute_import, division, unicode_literals import numpy as np from astropy", "self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem = bg_z # Calcualte", "u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs import radec", "= ang_sep * kpc_amin / (1+self.galaxy.z) # Physical 
#xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a", "source Attributes ---------- rho: float Impact parameter (u.kpc) JXP on 29 Nov 2014", "xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class CGMSys(object): \"\"\" Class for", "\"\"\"\"Return a string representing the type of vehicle this is.\"\"\" return 'CGM' #", "self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM", "# Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions = None # Output def", "self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A", "the type of vehicle this is.\"\"\" return 'CGM' # Output def __repr__(self): return", "as xra from xastropy.xutils import xdebug as xdb from xastropy.xutils import arrays as", "source bg_dec: str, float, Quantity DEC for background source bg_z: float Redshift of", "__init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions = None # Output", "kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing the", "Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions = None # Output def __repr__(self):", "gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z)", "import radec as xra from xastropy.xutils import xdebug as xdb from xastropy.xutils import", "Quantity RA for background source bg_dec: str, float, Quantity DEC for background source", "from astropy import units as u from xastropy.igm.abs_sys.abssys_utils 
import AbslineSystem from xastropy.galaxy.core import", "# kpc per arcmin self.rho = ang_sep * kpc_amin / (1+self.galaxy.z) # Physical", "('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for", "self.rho)) # Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes:", "of CGM analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import", "CGM system Combines absorption lines with a Galaxy Inputs: ---------- gal_ra: str, float,", "self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption", "__name__ == '__main__': # Initialize tmp = CGMAbs() print(tmp) tmp2 = CGMAbsSurvey() print(tmp2)", "CGM analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function,", "a string representing the type of vehicle this is.\"\"\" return 'CGM' # Output", "source self.abs_sys.zem = bg_z # Calcualte rho if cosmo is None: from astropy.cosmology", "gal_ra: str, float, Quantity RA for galaxy gal_dec: str, float, Quantity DEC for", "of vehicle this is.\"\"\" return 'CGM' # ###################### ####################### # Testing if __name__", "if verbose is True: print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin =", "of background source Attributes ---------- rho: float Impact parameter (u.kpc) JXP on 29", "########################## ########################## ########################## class CGMSys(object): \"\"\" Class for a CGM system Combines absorption", "Galaxy Inputs: ---------- 
gal_ra: str, float, Quantity RA for galaxy gal_dec: str, float,", "CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem = bg_z #", "for background source bg_dec: str, float, Quantity DEC for background source bg_z: float", "return 'CGM' # Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__,", "string representing the type of vehicle this is.\"\"\" return 'CGM' # Output def", "on 29 Nov 2014 \"\"\" # Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra,", "vehicle this is.\"\"\" return 'CGM' # ###################### ####################### # Testing if __name__ ==", "Nov 2014 \"\"\" # Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z,", "float, Quantity RA for background source bg_dec: str, float, Quantity DEC for background", "# Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): #", "by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import, division, unicode_literals import", "__init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy =", "import AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs import radec as xra from", "# Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs()", "Output def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def", "gal_dec: str, float, Quantity DEC for galaxy gal_z: float Galaxy redshift bg_ra: str,", "a string representing the type of vehicle this is.\"\"\" return 'CGM' # 
######################", "astropy import units as u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy", "zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM Absorption class", "Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\" def", "type of vehicle this is.\"\"\" return 'CGM' # Output def __repr__(self): return ('[{:s}:", "float Impact parameter (u.kpc) JXP on 29 Nov 2014 \"\"\" # Initialize def", "AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs import radec as xra from xastropy.xutils", "is None: from astropy.cosmology import WMAP9 as cosmo if verbose is True: print('cgm.core:", "self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) #", "representing the type of vehicle this is.\"\"\" return 'CGM' # ###################### ####################### #", "xu_array # Path for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class", "self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) )", "True: print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z )", "# Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\"", "xastropy.xutils import xdebug as xdb from xastropy.xutils import arrays as xu_array # Path", "arcmin self.rho = ang_sep * 
kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self):", "xastropy.xutils import arrays as xu_array # Path for xastropy #xa_path = imp.find_module('xastropy')[1] ##########################", "is.\"\"\" return 'CGM' # ###################### ####################### # Testing if __name__ == '__main__': #", "verbose is True: print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin(", "def print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle this is.\"\"\" return", ") # Background source self.abs_sys.zem = bg_z # Calcualte rho if cosmo is", "str, float, Quantity DEC for galaxy gal_z: float Galaxy redshift bg_ra: str, float,", "Testing if __name__ == '__main__': # Initialize tmp = CGMAbs() print(tmp) tmp2 =", "WMAP9 as cosmo if verbose is True: print('cgm.core: Using WMAP9 cosmology') ang_sep =", "for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\" def __init__(self):", "Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z,", "rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM Absorption class CGMAbs(AbslineSystem):", "source bg_z: float Redshift of background source Attributes ---------- rho: float Impact parameter", "Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle this", "vehicle this is.\"\"\" return 'CGM' # Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s},", "Galaxy redshift bg_ra: str, float, Quantity RA for background 
source bg_dec: str, float,", "gal_dec, z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys", "#;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import, division, unicode_literals import numpy as", "as xdb from xastropy.xutils import arrays as xu_array # Path for xastropy #xa_path", "None # Output def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True),", "self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) #", "astropy.cosmology import WMAP9 as cosmo if verbose is True: print('cgm.core: Using WMAP9 cosmology')", "as xu_array # Path for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ##########################", "system Attributes: \"\"\" def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions", "str, float, Quantity RA for background source bg_dec: str, float, Quantity DEC for", "Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord", "Path for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class CGMSys(object): \"\"\"", "RA for background source bg_dec: str, float, Quantity DEC for background source bg_z:", "#; NAME: #; cgm.core #; Version 1.0 #; #; PURPOSE: #; Module for", 
"import units as u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy from", "bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name", "'CGM' # ###################### ####################### # Testing if __name__ == '__main__': # Initialize tmp", "xra from xastropy.xutils import xdebug as xdb from xastropy.xutils import arrays as xu_array", "xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs import radec as xra", "# Calcualte rho if cosmo is None: from astropy.cosmology import WMAP9 as cosmo", "self.rho = ang_sep * kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return", "(1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing the type of", "__future__ import print_function, absolute_import, division, unicode_literals import numpy as np from astropy import", "= Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption", "self.galaxy.z ) # kpc per arcmin self.rho = ang_sep * kpc_amin / (1+self.galaxy.z)", "a CGM system Combines absorption lines with a Galaxy Inputs: ---------- gal_ra: str,", "bg_z # Calcualte rho if cosmo is None: from astropy.cosmology import WMAP9 as", "__repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return", "as u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs import", "print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = 
cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) #", "Combines absorption lines with a Galaxy Inputs: ---------- gal_ra: str, float, Quantity RA", "1.0 #; #; PURPOSE: #; Module for core routines of CGM analysis #;", "background source bg_z: float Redshift of background source Attributes ---------- rho: float Impact", "self.zabs)) def print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle this is.\"\"\"", "(bg_ra,bg_dec) ) # Background source self.abs_sys.zem = bg_z # Calcualte rho if cosmo", "np from astropy import units as u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core", "(u.kpc) JXP on 29 Nov 2014 \"\"\" # Initialize def __init__(self, gal_ra, gal_dec,", "return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class", "# Path for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class CGMSys(object):", "Quantity DEC for galaxy gal_z: float Galaxy redshift bg_ra: str, float, Quantity RA", "bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) #", "Background source self.abs_sys.zem = bg_z # Calcualte rho if cosmo is None: from", "CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\" def __init__(self): # Generate with type", "Attributes: \"\"\" def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions =", "#; #; PURPOSE: #; Module for core routines of CGM analysis #; 29-Nov-2014", "parameter (u.kpc) JXP on 29 Nov 2014 \"\"\" # Initialize def __init__(self, gal_ra,", "#;+ #; NAME: #; cgm.core #; Version 1.0 #; #; PURPOSE: #; Module", "imp.find_module('xastropy')[1] ########################## 
########################## ########################## ########################## class CGMSys(object): \"\"\" Class for a CGM system", "kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin self.rho = ang_sep *", "cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin self.rho = ang_sep * kpc_amin /", "cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin", "self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem =", "AbslineSystem.__init__(self,'CGM') # Init self.ions = None # Output def __repr__(self): return ('[{:s}: {:s}", "RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM Absorption", "JXP on 29 Nov 2014 \"\"\" # Initialize def __init__(self, gal_ra, gal_dec, gal_z,", "__repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) #", "#; Module for core routines of CGM analysis #; 29-Nov-2014 by JXP #;-", "self.abs_sys.zem = bg_z # Calcualte rho if cosmo is None: from astropy.cosmology import", "representing the type of vehicle this is.\"\"\" return 'CGM' # Output def __repr__(self):", "kpc per arcmin self.rho = ang_sep * kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace()", "\"\"\" # Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False):", "self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a string 
representing the type of", "########################## ########################## ########################## ########################## class CGMSys(object): \"\"\" Class for a CGM system Combines", "import numpy as np from astropy import units as u from xastropy.igm.abs_sys.abssys_utils import", "---------- gal_ra: str, float, Quantity RA for galaxy gal_dec: str, float, Quantity DEC", "2014 \"\"\" # Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None,", "as cosmo if verbose is True: print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin')", "# Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+", ") # kpc per arcmin self.rho = ang_sep * kpc_amin / (1+self.galaxy.z) #", "from astropy.cosmology import WMAP9 as cosmo if verbose is True: print('cgm.core: Using WMAP9", "#xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle this is.\"\"\"", "# ###################### ####################### # Testing if __name__ == '__main__': # Initialize tmp =", "= self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin self.rho =", "type AbslineSystem.__init__(self,'CGM') # Init self.ions = None # Output def __repr__(self): return ('[{:s}:", "= imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class CGMSys(object): \"\"\" Class for a CGM", "analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import,", "unicode_literals import numpy as np from astropy import units as u from xastropy.igm.abs_sys.abssys_utils", "Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( 
self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho)) # Class for CGM", "('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a string", "Inputs: ---------- gal_ra: str, float, Quantity RA for galaxy gal_dec: str, float, Quantity", "return 'CGM' # ###################### ####################### # Testing if __name__ == '__main__': # Initialize", "# Testing if __name__ == '__main__': # Initialize tmp = CGMAbs() print(tmp) tmp2", "#; cgm.core #; Version 1.0 #; #; PURPOSE: #; Module for core routines", "self.ions = None # Output def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__,", "absolute_import, division, unicode_literals import numpy as np from astropy import units as u", "{:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a string representing", "29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import, division, unicode_literals", "xastropy.galaxy.core import Galaxy from xastropy.obs import radec as xra from xastropy.xutils import xdebug", "bg_ra: str, float, Quantity RA for background source bg_dec: str, float, Quantity DEC", "def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions = None #", "self.galaxy.z, self.rho)) # Class for CGM Absorption class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system", "from xastropy.obs import radec as xra from xastropy.xutils import xdebug as xdb from", "self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def 
print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle this", "Module for core routines of CGM analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------", "Calcualte rho if cosmo is None: from astropy.cosmology import WMAP9 as cosmo if", "DEC for background source bg_z: float Redshift of background source Attributes ---------- rho:", "str, float, Quantity DEC for background source bg_z: float Redshift of background source", "from xastropy.xutils import arrays as xu_array # Path for xastropy #xa_path = imp.find_module('xastropy')[1]", "rho if cosmo is None: from astropy.cosmology import WMAP9 as cosmo if verbose", "radec as xra from xastropy.xutils import xdebug as xdb from xastropy.xutils import arrays", "xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem = bg_z # Calcualte rho if", "if cosmo is None: from astropy.cosmology import WMAP9 as cosmo if verbose is", "#;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import, division, unicode_literals import numpy as np", "Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system", "Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True))", "Quantity RA for galaxy gal_dec: str, float, Quantity DEC for galaxy gal_z: float", "Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy", "Impact parameter (u.kpc) JXP on 29 Nov 2014 \"\"\" # Initialize def __init__(self,", "########################## class CGMSys(object): \"\"\" Class for a CGM system Combines absorption lines with", "# Init 
self.ions = None # Output def __repr__(self): return ('[{:s}: {:s} {:s},", "galaxy gal_z: float Galaxy redshift bg_ra: str, float, Quantity RA for background source", "('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec)", "for galaxy gal_z: float Galaxy redshift bg_ra: str, float, Quantity RA for background", "Attributes ---------- rho: float Impact parameter (u.kpc) JXP on 29 Nov 2014 \"\"\"", "Init self.ions = None # Output def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format(", "import xdebug as xdb from xastropy.xutils import arrays as xu_array # Path for", "# Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing the type of vehicle", "bg_dec: str, float, Quantity DEC for background source bg_z: float Redshift of background", "routines of CGM analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__", "per arcmin self.rho = ang_sep * kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace() def", "float, Quantity RA for galaxy gal_dec: str, float, Quantity DEC for galaxy gal_z:", "= bg_z # Calcualte rho if cosmo is None: from astropy.cosmology import WMAP9", "cosmo is None: from astropy.cosmology import WMAP9 as cosmo if verbose is True:", "print_function, absolute_import, division, unicode_literals import numpy as np from astropy import units as", "'CGM' # Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True),", "from xastropy.galaxy.core import Galaxy from xastropy.obs import radec as xra from xastropy.xutils import", "background source bg_dec: str, float, Quantity DEC for background source bg_z: float Redshift", "a Galaxy Inputs: 
---------- gal_ra: str, float, Quantity RA for galaxy gal_dec: str,", "z=gal_z) # Name self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys =", "\"\"\" Class for a CGM system Combines absorption lines with a Galaxy Inputs:", "class CGMSys(object): \"\"\" Class for a CGM system Combines absorption lines with a", "def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True), self.galaxy.z, self.rho))", "type of vehicle this is.\"\"\" return 'CGM' # ###################### ####################### # Testing if", "gal_z: float Galaxy redshift bg_ra: str, float, Quantity RA for background source bg_dec:", "#; Version 1.0 #; #; PURPOSE: #; Module for core routines of CGM", "{:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a string representing the", "# Output def __repr__(self): return ('[{:s}: Galaxy RA/DEC={:s}{:s}, zgal={:g}, rho={:g}]'.format( self.__class__.__name__, self.abs_sys.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.abs_sys.coord.dec.to_string(sep=':',pad=True,alwayssign=True),", "rho: float Impact parameter (u.kpc) JXP on 29 Nov 2014 \"\"\" # Initialize", "with a Galaxy Inputs: ---------- gal_ra: str, float, Quantity RA for galaxy gal_dec:", "#; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\" from __future__ import print_function, absolute_import, division,", "= cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per arcmin self.rho = ang_sep * kpc_amin", "class CGMAbs(AbslineSystem): \"\"\"A CGM absorption system Attributes: \"\"\" 
def __init__(self): # Generate with", "* kpc_amin / (1+self.galaxy.z) # Physical #xdb.set_trace() def print_abs_type(self): \"\"\"\"Return a string representing", "###################### ####################### # Testing if __name__ == '__main__': # Initialize tmp = CGMAbs()", "import print_function, absolute_import, division, unicode_literals import numpy as np from astropy import units", "gal_ra, gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra,", "return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs)) def print_abs_type(self): \"\"\"\"Return a", "self.name = ('CGM'+ self.galaxy.coord.ra.to_string(unit=u.hour,sep='',pad=True)+ self.galaxy.coord.dec.to_string(sep='',pad=True,alwayssign=True)) # Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord =", "DEC for galaxy gal_z: float Galaxy redshift bg_ra: str, float, Quantity RA for", "verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name = ('CGM'+", "\"\"\"A CGM absorption system Attributes: \"\"\" def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM')", "####################### # Testing if __name__ == '__main__': # Initialize tmp = CGMAbs() print(tmp)", "Absorption system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background source", "for core routines of CGM analysis #; 29-Nov-2014 by JXP #;- #;------------------------------------------------------------------------------ \"\"\"", "division, unicode_literals import numpy as np from astropy import units as u from", "xastropy.obs import radec as xra from xastropy.xutils import xdebug as xdb from xastropy.xutils", "Version 1.0 #; #; PURPOSE: #; Module for core routines of CGM analysis", "WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = 
cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc per", "PURPOSE: #; Module for core routines of CGM analysis #; 29-Nov-2014 by JXP", "cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name =", "lines with a Galaxy Inputs: ---------- gal_ra: str, float, Quantity RA for galaxy", "<filename>xastropy/cgm/core.py \"\"\" #;+ #; NAME: #; cgm.core #; Version 1.0 #; #; PURPOSE:", "as np from astropy import units as u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from", "import arrays as xu_array # Path for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ##########################", "redshift bg_ra: str, float, Quantity RA for background source bg_dec: str, float, Quantity", "units as u from xastropy.igm.abs_sys.abssys_utils import AbslineSystem from xastropy.galaxy.core import Galaxy from xastropy.obs", "= xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem = bg_z # Calcualte rho", "absorption system Attributes: \"\"\" def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') # Init", "Quantity DEC for background source bg_z: float Redshift of background source Attributes ----------", "########################## ########################## class CGMSys(object): \"\"\" Class for a CGM system Combines absorption lines", "Redshift of background source Attributes ---------- rho: float Impact parameter (u.kpc) JXP on", "float, Quantity DEC for background source bg_z: float Redshift of background source Attributes", "\"\"\" def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') # Init self.ions = None", "import Galaxy from xastropy.obs import radec as xra from xastropy.xutils import xdebug as", "\"\"\" from __future__ import print_function, absolute_import, division, unicode_literals import numpy as np from", "# Output def __repr__(self): return ('[{:s}: {:s} {:s}, {:g}]'.format( self.__class__.__name__, 
self.coord.ra.to_string(unit=u.hour,sep=':',pad=True), self.coord.dec.to_string(sep=':',pad=True), self.zabs))", "cosmo if verbose is True: print('cgm.core: Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin", "#; PURPOSE: #; Module for core routines of CGM analysis #; 29-Nov-2014 by", "bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec, z=gal_z) # Name self.name", "for galaxy gal_dec: str, float, Quantity DEC for galaxy gal_z: float Galaxy redshift", "29 Nov 2014 \"\"\" # Initialize def __init__(self, gal_ra, gal_dec, gal_z, bg_ra, bg_dec,", "of vehicle this is.\"\"\" return 'CGM' # Output def __repr__(self): return ('[{:s}: Galaxy", "system self.abs_sys = CGMAbs() self.abs_sys.coord = xra.to_coord( (bg_ra,bg_dec) ) # Background source self.abs_sys.zem", "import WMAP9 as cosmo if verbose is True: print('cgm.core: Using WMAP9 cosmology') ang_sep", "str, float, Quantity RA for galaxy gal_dec: str, float, Quantity DEC for galaxy", "for xastropy #xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class CGMSys(object): \"\"\" Class", "CGM absorption system Attributes: \"\"\" def __init__(self): # Generate with type AbslineSystem.__init__(self,'CGM') #", "xdebug as xdb from xastropy.xutils import arrays as xu_array # Path for xastropy", "Using WMAP9 cosmology') ang_sep = self.abs_sys.coord.separation(self.galaxy.coord).to('arcmin') kpc_amin = cosmo.kpc_comoving_per_arcmin( self.galaxy.z ) # kpc", "galaxy gal_dec: str, float, Quantity DEC for galaxy gal_z: float Galaxy redshift bg_ra:", "#xa_path = imp.find_module('xastropy')[1] ########################## ########################## ########################## ########################## class CGMSys(object): \"\"\" Class for a", "gal_dec, gal_z, bg_ra, bg_dec, bg_z, cosmo=None, verbose=False): # Galaxy self.galaxy = Galaxy(gal_ra, gal_dec,", 
"for background source bg_z: float Redshift of background source Attributes ---------- rho: float", "bg_z: float Redshift of background source Attributes ---------- rho: float Impact parameter (u.kpc)", "cgm.core #; Version 1.0 #; #; PURPOSE: #; Module for core routines of" ]
[ "send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail take at", "{count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body, \"<EMAIL>\", to_addresses) def send_msg(message): time.sleep(1) message.send()", "#Make an e-mail take at least one second to slow down for SES", "to slow down for SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg =", "def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take at least one second to", "for SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email:", "SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email: msg.to", "at least one second to slow down for SES student = Student.objects.get(pk=student_id) template", "msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take at", "= \"Your e-mail was sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message", "override_email=None): time.sleep(1) #Make an e-mail take at least one second to slow down", "msg = template.get_message(student) if override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1)", "override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take", "an e-mail take at least one second to slow down for SES body", "= StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if 
override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses,", "one second to slow down for SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id)", "if override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail", "[override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take at least one", "following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body, \"<EMAIL>\", to_addresses) def send_msg(message): time.sleep(1)", "to_addresses): time.sleep(1) #Make an e-mail take at least one second to slow down", "send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail take at least one second to", "def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail take at least one second", "take at least one second to slow down for SES student = Student.objects.get(pk=student_id)", "template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email: msg.to = [override_email] msg.send() def", "to slow down for SES body = \"Your e-mail was sent to the", "import send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail take", "<reponame>rectory-school/rectory-apps<filename>courseevaluations/lib/async.py from courseevaluations.models import StudentEmailTemplate from academics.models import Student from django.core.mail import send_mail", "sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body, \"<EMAIL>\", to_addresses)", "Student from 
django.core.mail import send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make", "academics.models import Student from django.core.mail import send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None):", "StudentEmailTemplate from academics.models import Student from django.core.mail import send_mail import time def send_student_email_from_template(template_id,", "student_id, override_email=None): time.sleep(1) #Make an e-mail take at least one second to slow", "take at least one second to slow down for SES body = \"Your", "Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email: msg.to = [override_email] msg.send()", "down for SES body = \"Your e-mail was sent to the following {count:}", "e-mail take at least one second to slow down for SES student =", "the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body, \"<EMAIL>\", to_addresses) def send_msg(message):", "for SES body = \"Your e-mail was sent to the following {count:} people:", "= template.get_message(student) if override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make", "from academics.models import Student from django.core.mail import send_mail import time def send_student_email_from_template(template_id, student_id,", "= [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take at least", "e-mail take at least one second to slow down for SES body =", "import Student from django.core.mail import send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1)", "an e-mail take at least one second to slow down for SES 
student", "body = \"Your e-mail was sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses))", "slow down for SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student)", "at least one second to slow down for SES body = \"Your e-mail", "template.get_message(student) if override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an", "send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take at least one second to slow", "django.core.mail import send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail", "one second to slow down for SES body = \"Your e-mail was sent", "time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail take at least one", "least one second to slow down for SES student = Student.objects.get(pk=student_id) template =", "= Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email: msg.to = [override_email]", "\"Your e-mail was sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\",", "StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email: msg.to = [override_email] msg.send() def send_confirmation_email(addresses, to_addresses):", "SES body = \"Your e-mail was sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses),", "time.sleep(1) #Make an e-mail take at least one second to slow down for", "import StudentEmailTemplate from academics.models import Student from django.core.mail import send_mail 
import time def", "down for SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if", "student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg = template.get_message(student) if override_email: msg.to =", "least one second to slow down for SES body = \"Your e-mail was", "to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body, \"<EMAIL>\", to_addresses) def", "import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an e-mail take at least", "second to slow down for SES student = Student.objects.get(pk=student_id) template = StudentEmailTemplate.objects.get(pk=template_id) msg", "msg.send() def send_confirmation_email(addresses, to_addresses): time.sleep(1) #Make an e-mail take at least one second", "e-mail was sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body,", "second to slow down for SES body = \"Your e-mail was sent to", "was sent to the following {count:} people: \\n\\n{addresses:}\".format(count=len(addresses), addresses=\"\\n\".join(addresses)) send_mail(\"Message confirmation\", body, \"<EMAIL>\",", "from courseevaluations.models import StudentEmailTemplate from academics.models import Student from django.core.mail import send_mail import", "from django.core.mail import send_mail import time def send_student_email_from_template(template_id, student_id, override_email=None): time.sleep(1) #Make an", "slow down for SES body = \"Your e-mail was sent to the following", "courseevaluations.models import StudentEmailTemplate from academics.models import Student from django.core.mail import send_mail import time" ]
[ "assert (<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode():", "finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class", "not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__()", "/>.value) is False # but this only works in non-dev mode assert (<Foo", "does not validate data if off.\"\"\" from typing import Union import pytest from", "(<Foo value={0} />.value) is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo", "not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True)", "in_dev_mode() unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert", "not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode()", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert", "pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int complex:", "assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the", "not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert 
not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__", "off.\"\"\" from typing import Union import pytest from mixt.internal.base import Base from mixt.exceptions", "in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_global_default_dev_mode_is_true():", "PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__()", "assert in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False):", "restore the normal state def test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert not", "/>.value) is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} />", "restore the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try:", "assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with", "def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode()", "is True assert (<Foo value={0} />.value) is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' />", "not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert", "Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from 
mixt.internal.proptypes import BasePropTypes as PropTypes", "_to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__()", "def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False):", "override_dev_mode(dev_mode=False): # normal behavior still works assert (<Foo value='value' />.value) is True assert", "with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert", "pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not", "in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert", "in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal", "value={0} />.value) is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0}", "in_dev_mode from mixt.proptypes import Choices class DummyBase(Base): def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true():", "not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert", "if off.\"\"\" from typing import Union import pytest from mixt.internal.base import Base from", "PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): 
assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated", "test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__()", "force restore the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode()", "can be toggled and does not validate data if off.\"\"\" from typing import", "assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__()", "state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert not", "override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not", "assert (<Foo value='value' />.value) is True assert (<Foo value={False} />.value) is False assert", "set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force", "float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo' assert (<Foo complex='bar' />.complex)", "assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__", "not validate data if off.\"\"\" from typing import Union import pytest from mixt.internal.base", "with 
PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert", "assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() #", "that dev-mode can be toggled and does not validate data if off.\"\"\" from", "pytest from mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes", "finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_proptypes_context_manager(): assert", "value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c'", "not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False)", "assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert not", "assert not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the", "True # force restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes:", "pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool with", "from mixt.proptypes import Choices class DummyBase(Base): def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert", "not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: 
PropTypes.__dev_mode__ = True", "assert (<Foo value='fake' />.value) is True assert (<Foo value={0} />.value) is False with", "def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int complex: Union[int, float] with override_dev_mode(dev_mode=False):", "restore the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not", "normal state def test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode() assert", "PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__()", "assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True):", "BasePropTypes as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import", "in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force", "but this only works in non-dev mode assert (<Foo value='fake' />.value) is True", "/>.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo' /> with pytest.raises(InvalidPropValueError): <Foo complex='bar' />", "override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def", "value: int complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo'", "with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool", "int 
complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo' assert", "# normal behavior still works assert (<Foo value='value' />.value) is True assert (<Foo", "value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int complex: Union[int, float]", "PropTypes.__dev_mode__ = True # force restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase):", "Union import pytest from mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError", "not in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False)", "True assert (<Foo value={False} />.value) is False assert (<Foo value='false' />.value) is False", "not in_dev_mode() unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False)", "force restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices", "coding: mixt \"\"\"Ensure that dev-mode can be toggled and does not validate data", "state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices = ['a', 'b'] with", "(<Foo value='fake' />.value) is True assert (<Foo value={0} />.value) is False with pytest.raises(InvalidPropBoolError):", "in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not", "mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes", "be toggled and does not validate data if off.\"\"\" from typing import Union", "state def 
test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode()", "assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode()", "assert not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode()", "value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase):", "InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode,", "this only works in non-dev mode assert (<Foo value='fake' />.value) is True assert", "from typing import Union import pytest from mixt.internal.base import Base from mixt.exceptions import", "# force restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value:", "== 'foo' assert (<Foo complex='bar' />.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo' />", "PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert", "not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False)", "override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert", "/> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool with 
override_dev_mode(dev_mode=False): # normal", "(<Foo value='false' />.value) is False # but this only works in non-dev mode", "value: bool with override_dev_mode(dev_mode=False): # normal behavior still works assert (<Foo value='value' />.value)", "is False assert (<Foo value='false' />.value) is False # but this only works", "restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices =", "the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__()", "# encapstulated with override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not", "behavior still works assert (<Foo value='value' />.value) is True assert (<Foo value={False} />.value)", "PropTypes: value: int complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) ==", "override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo' assert (<Foo complex='bar' />.complex) == 'bar'", "False # but this only works in non-dev mode assert (<Foo value='fake' />.value)", "in non-dev mode assert (<Foo value='fake' />.value) is True assert (<Foo value={0} />.value)", "/>.value) == 'foo' assert (<Foo complex='bar' />.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo'", "test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not in_dev_mode() set_dev_mode()", "assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__()", "toggled and does not validate data if off.\"\"\" from typing import Union import", "value='false' />.value) is False # but this only 
works in non-dev mode assert", "typing import Union import pytest from mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError,", "in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True)", "# force restore the normal state def test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False):", "value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes:", "assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__()", "(<Foo value={False} />.value) is False assert (<Foo value='false' />.value) is False # but", "normal behavior still works assert (<Foo value='value' />.value) is True assert (<Foo value={False}", "test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int complex: Union[int, float] with override_dev_mode(dev_mode=False): assert", "set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert", "force restore the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert", "import BasePropTypes as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes", "with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert", "(<Foo value='foo' />.value) == 'foo' assert (<Foo complex='bar' />.complex) == 'bar' with 
pytest.raises(InvalidPropValueError):", "in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ =", "class Foo(DummyBase): class PropTypes: value: bool with override_dev_mode(dev_mode=False): # normal behavior still works", "assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal state def", "mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices class DummyBase(Base): def", "# force restore the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False):", "# coding: mixt \"\"\"Ensure that dev-mode can be toggled and does not validate", "is True assert (<Foo value={False} />.value) is False assert (<Foo value='false' />.value) is", "class PropTypes: value: int complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value)", "class PropTypes: value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value)", "override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True #", "override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode()", "/> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int complex: Union[int, float] with", "/>.value) is True assert (<Foo value={False} />.value) is False assert (<Foo value='false' />.value)", "PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not", "PropTypes.__in_dev_mode__() # 
encapstulated with override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert", "False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode():", "normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert", "Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo' assert (<Foo complex='bar'", "mixt \"\"\"Ensure that dev-mode can be toggled and does not validate data if", "import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as", "= ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError):", "in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert", "def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try:", "not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False):", "import Union import pytest from mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError,", "PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with", "'foo' assert (<Foo complex='bar' 
/>.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo' /> with", "test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with", "set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices class DummyBase(Base): def _to_list(self, acc):", "class Foo(DummyBase): class PropTypes: value: int complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo", "class PropTypes: value: bool with override_dev_mode(dev_mode=False): # normal behavior still works assert (<Foo", "import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as PropTypes from mixt.internal.dev_mode import", "the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices = ['a',", "True # force restore the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle():", "PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert", "<Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class", "from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices class DummyBase(Base):", "with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' />", "override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() assert in_dev_mode()", "assert not in_dev_mode() 
unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode()", "/>.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class", "force restore the normal state def test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert", "from mixt.internal.proptypes import BasePropTypes as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode", "dev-mode can be toggled and does not validate data if off.\"\"\" from typing", "value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool with override_dev_mode(dev_mode=False): #", "still works assert (<Foo value='value' />.value) is True assert (<Foo value={False} />.value) is", "not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally:", "PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__()", "normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices = ['a', 'b']", "assert (<Foo value={0} />.value) is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError):", "assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode()", "from mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import", "try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not 
in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode()", "with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__()", "assert (<Foo value='foo' />.value) == 'foo' assert (<Foo complex='bar' />.complex) == 'bar' with", "mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as PropTypes from mixt.internal.dev_mode", "in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True)", "PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_proptypes_context_manager():", "override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the", "= True # force restore the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def", "in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode() with", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert", "PropTypes.__dev_mode__ = True # force restore the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__()", "with override_dev_mode(False): assert not 
in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() assert", "InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode,", "def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__()", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True)", "in_dev_mode() assert not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore", "set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert", "PropTypes: value: bool with override_dev_mode(dev_mode=False): # normal behavior still works assert (<Foo value='value'", "test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool with override_dev_mode(dev_mode=False): # normal behavior still", "is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def", "with override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert", "PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True # force restore", "PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert 
PropTypes.__in_dev_mode__() assert not", "def test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False)", "in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode()", "in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_choices_are_not_checked_in_non_dev_mode():", "assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True):", "override_dev_mode, in_dev_mode from mixt.proptypes import Choices class DummyBase(Base): def _to_list(self, acc): pass def", "= True # force restore the normal state def test_global_context_manager(): assert in_dev_mode() try:", "not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True #", "class Foo(DummyBase): class PropTypes: value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo", "with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True", "PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert", "assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True # force restore the", "PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert 
PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__()", "try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__()", "Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c' with", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True # force restore the normal", "= True # force restore the normal state def test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class", "in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__", "/>.value) is False assert (<Foo value='false' />.value) is False # but this only", "def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool with override_dev_mode(dev_mode=False): # normal behavior", "complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo' assert (<Foo", "assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode()", "complex='bar' />.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo' /> with pytest.raises(InvalidPropValueError): <Foo complex='bar'", "Foo(DummyBase): class PropTypes: value: int complex: Union[int, float] with override_dev_mode(dev_mode=False): assert (<Foo value='foo'", "value={False} />.value) is False assert (<Foo value='false' />.value) is False # but this", "bool with override_dev_mode(dev_mode=False): # normal behavior still works assert (<Foo value='value' />.value) is", "PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() 
PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not", "PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices class", "PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ =", "PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert", "as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices", "value='fake' />.value) is True assert (<Foo value={0} />.value) is False with pytest.raises(InvalidPropBoolError): <Foo", "True assert (<Foo value={0} />.value) is False with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with", "with pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class", "set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal state", "['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo", "assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() 
PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True # force", "normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert", "/> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value:", "in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_global_context_manager():", "assert (<Foo value={False} />.value) is False assert (<Foo value='false' />.value) is False #", "set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode()", "False assert (<Foo value='false' />.value) is False # but this only works in", "set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not", "state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__()", "<Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int complex: Union[int,", "PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert not in_dev_mode() with", "= True # force restore the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try:", "encapstulated with override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode()", "PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True): assert 
PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert not", "in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True)", "PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with", "in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert in_dev_mode()", "PropTypes: value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) ==", "with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore", "PropTypes.__dev_mode__ = True # force restore the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode()", "unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert", "assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True):", "unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not", "pytest.raises(InvalidPropBoolError): <Foo value='fake' /> with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase):", "validate data if off.\"\"\" from typing import Union import pytest from mixt.internal.base import", "finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_global_context_manager(): assert", "PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with 
PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert", "assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__()", "assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not in_dev_mode() set_dev_mode() assert", "the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode()", "not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally:", "set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ =", "True # force restore the normal state def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with", "data if off.\"\"\" from typing import Union import pytest from mixt.internal.base import Base", "assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert", "with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode()", "Foo(DummyBase): class PropTypes: value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c'", "InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as PropTypes from 
mixt.internal.dev_mode import set_dev_mode,", "PropTypes.__dev_mode__ = True # force restore the normal state def test_global_context_manager(): assert in_dev_mode()", "PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert not in_dev_mode() with override_dev_mode(True): assert", "assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False):", "try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not", "== 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes:", "mixt.internal.proptypes import BasePropTypes as PropTypes from mixt.internal.dev_mode import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from", "only works in non-dev mode assert (<Foo value='fake' />.value) is True assert (<Foo", "value='foo' />.value) == 'foo' assert (<Foo complex='bar' />.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo", "assert in_dev_mode() assert not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force", "set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore", "import Choices class DummyBase(Base): def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def", "the normal state def test_global_context_manager(): assert in_dev_mode() try: with override_dev_mode(False): assert not in_dev_mode()", "test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): 
assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode()", "True # force restore the normal state def test_global_context_manager(): assert in_dev_mode() try: with", "PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert", "try: with override_dev_mode(False): assert not in_dev_mode() assert in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not", "(<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class", "works in non-dev mode assert (<Foo value='fake' />.value) is True assert (<Foo value={0}", "<Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: bool with override_dev_mode(dev_mode=False):", "non-dev mode assert (<Foo value='fake' />.value) is True assert (<Foo value={0} />.value) is", "assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not", "assert (<Foo value='false' />.value) is False # but this only works in non-dev", "with pytest.raises(InvalidPropBoolError): <Foo value={0} /> def test_normal_value_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: int", "assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True):", "in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with", "assert not in_dev_mode() set_dev_mode() assert in_dev_mode() 
set_dev_mode() assert in_dev_mode() set_dev_mode(False) assert not in_dev_mode()", "in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True #", "(<Foo complex='bar' />.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo' /> with pytest.raises(InvalidPropValueError): <Foo", "import set_dev_mode, unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices class DummyBase(Base): def _to_list(self,", "(<Foo value='value' />.value) is True assert (<Foo value={False} />.value) is False assert (<Foo", "in_dev_mode() set_dev_mode(False) with override_dev_mode(False): assert not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert", "# force restore the normal state def test_global_default_dev_mode_is_true(): assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert", "mixt.proptypes import Choices class DummyBase(Base): def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__()", "assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() assert in_dev_mode() finally:", "assert not PropTypes.__in_dev_mode__() with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) with PropTypes.__override_dev_mode__(True):", "assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally:", "with override_dev_mode(dev_mode=False): assert (<Foo value='foo' />.value) == 'foo' assert (<Foo complex='bar' />.complex) ==", "assert (<Foo complex='bar' />.complex) == 'bar' with pytest.raises(InvalidPropValueError): <Foo value='foo' /> with pytest.raises(InvalidPropValueError):", "PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__() finally: 
PropTypes.__dev_mode__ = True # force restore the normal state", "test_choices_are_not_checked_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value: Choices = ['a', 'b'] with override_dev_mode(dev_mode=False): assert", "unset_dev_mode, override_dev_mode, in_dev_mode from mixt.proptypes import Choices class DummyBase(Base): def _to_list(self, acc): pass", "\"\"\"Ensure that dev-mode can be toggled and does not validate data if off.\"\"\"", "mode assert (<Foo value='fake' />.value) is True assert (<Foo value={0} />.value) is False", "class DummyBase(Base): def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert", "not in_dev_mode() assert not in_dev_mode() with override_dev_mode(True): assert in_dev_mode() assert not in_dev_mode() set_dev_mode(True)", "works assert (<Foo value='value' />.value) is True assert (<Foo value={False} />.value) is False", "DummyBase(Base): def _to_list(self, acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__()", "def test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False)", "in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal state", "with override_dev_mode(dev_mode=False): # normal behavior still works assert (<Foo value='value' />.value) is True", "not in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal", "not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode() finally: PropTypes.__dev_mode__ = True #", "Choices class DummyBase(Base): def _to_list(self, acc): pass def 
test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle():", "Foo(DummyBase): class PropTypes: value: bool with override_dev_mode(dev_mode=False): # normal behavior still works assert", "assert in_dev_mode() def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert", "assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert", "is False # but this only works in non-dev mode assert (<Foo value='fake'", "test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__()", "acc): pass def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert", "assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__()", "assert not in_dev_mode() set_dev_mode(True) with override_dev_mode(True): assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ =", "def test_global_set_dev_mode_toggle(): assert in_dev_mode() try: unset_dev_mode() assert not in_dev_mode() unset_dev_mode() assert not in_dev_mode()", "'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c' /> def test_boolean_is_not_validated_in_non_dev_mode(): class Foo(DummyBase): class PropTypes: value:", "and does not validate data if off.\"\"\" from typing import Union import pytest", "assert 
PropTypes.__in_dev_mode__() finally: PropTypes.__dev_mode__ = True # force restore the normal state def", "'b'] with override_dev_mode(dev_mode=False): assert (<Foo value='c' />.value) == 'c' with pytest.raises(InvalidPropChoiceError): <Foo value='c'", "value='value' />.value) is True assert (<Foo value={False} />.value) is False assert (<Foo value='false'", "assert in_dev_mode() assert in_dev_mode() finally: PropTypes.__dev_mode__ = True # force restore the normal", "assert not in_dev_mode() set_dev_mode(False) assert not in_dev_mode() set_dev_mode(True) assert in_dev_mode() set_dev_mode(True) assert in_dev_mode()", "PropTypes.__set_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert", "PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__() PropTypes.__unset_dev_mode__() assert not", "assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) assert not PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(True) assert PropTypes.__in_dev_mode__()", "with PropTypes.__override_dev_mode__(True): assert PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() # encapstulated with override_dev_mode(False): assert not in_dev_mode()", "test_proptypes_context_manager(): assert PropTypes.__in_dev_mode__() try: with PropTypes.__override_dev_mode__(False): assert not PropTypes.__in_dev_mode__() assert PropTypes.__in_dev_mode__() PropTypes.__set_dev_mode__(False) with", "finally: PropTypes.__dev_mode__ = True # force restore the normal state def test_global_default_dev_mode_is_true(): assert", "/>.value) is True assert (<Foo value={0} />.value) is False with 
pytest.raises(InvalidPropBoolError): <Foo value='fake'", "from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from mixt.internal.proptypes import BasePropTypes as PropTypes from", "# but this only works in non-dev mode assert (<Foo value='fake' />.value) is", "def test_proptypes_default_dev_mode_is_true(): assert PropTypes.__in_dev_mode__() def test_proptypes_set_dev_mode_toggle(): assert PropTypes.__in_dev_mode__() try: PropTypes.__unset_dev_mode__() assert not PropTypes.__in_dev_mode__()", "import pytest from mixt.internal.base import Base from mixt.exceptions import InvalidPropChoiceError, InvalidPropBoolError, InvalidPropValueError from" ]
[ "class Solution: # @param matrix, a list of lists of integers # RETURN", "else: y = 0 rows = [1]*x cols = [1]*y for i in", "= len(matrix[0]) else: y = 0 rows = [1]*x cols = [1]*y for", "for i in range(x): for j in range(y): if matrix[i][j] == 0: rows[i]", "in range(x): for j in range(y): if matrix[i][j] == 0: rows[i] = 0", "range(y): if cols[j] == 0: for i in range(x): matrix[i][j] = 0 m", "0 for i in range(x): if rows[i] == 0: for j in range(y):", "0: for j in range(y): matrix[i][j] = 0 for j in range(y): if", "if matrix[i][j] == 0: rows[i] = 0 cols[j] = 0 for i in", "in range(y): if matrix[i][j] == 0: rows[i] = 0 cols[j] = 0 for", "range(x): if rows[i] == 0: for j in range(y): matrix[i][j] = 0 for", "# RETURN NOTHING, MODIFY matrix IN PLACE. def setZeroes(self, matrix): x = len(matrix)", "IN PLACE. def setZeroes(self, matrix): x = len(matrix) if x > 0: y", "i in range(x): if rows[i] == 0: for j in range(y): matrix[i][j] =", "for i in range(x): if rows[i] == 0: for j in range(y): matrix[i][j]", "[1]*x cols = [1]*y for i in range(x): for j in range(y): if", "list of lists of integers # RETURN NOTHING, MODIFY matrix IN PLACE. def", "0 for j in range(y): if cols[j] == 0: for i in range(x):", "in range(x): if rows[i] == 0: for j in range(y): matrix[i][j] = 0", "cols[j] == 0: for i in range(x): matrix[i][j] = 0 m = [[1,1,1],[0,1,2]]", "lists of integers # RETURN NOTHING, MODIFY matrix IN PLACE. def setZeroes(self, matrix):", "NOTHING, MODIFY matrix IN PLACE. 
def setZeroes(self, matrix): x = len(matrix) if x", "= [1]*y for i in range(x): for j in range(y): if matrix[i][j] ==", "== 0: for j in range(y): matrix[i][j] = 0 for j in range(y):", "0 rows = [1]*x cols = [1]*y for i in range(x): for j", "j in range(y): matrix[i][j] = 0 for j in range(y): if cols[j] ==", "Solution: # @param matrix, a list of lists of integers # RETURN NOTHING,", "if x > 0: y = len(matrix[0]) else: y = 0 rows =", "in range(y): matrix[i][j] = 0 for j in range(y): if cols[j] == 0:", "0: y = len(matrix[0]) else: y = 0 rows = [1]*x cols =", "for j in range(y): if cols[j] == 0: for i in range(x): matrix[i][j]", "MODIFY matrix IN PLACE. def setZeroes(self, matrix): x = len(matrix) if x >", "setZeroes(self, matrix): x = len(matrix) if x > 0: y = len(matrix[0]) else:", "matrix[i][j] = 0 for j in range(y): if cols[j] == 0: for i", "a list of lists of integers # RETURN NOTHING, MODIFY matrix IN PLACE.", "def setZeroes(self, matrix): x = len(matrix) if x > 0: y = len(matrix[0])", "j in range(y): if matrix[i][j] == 0: rows[i] = 0 cols[j] = 0", "0 cols[j] = 0 for i in range(x): if rows[i] == 0: for", "for j in range(y): if matrix[i][j] == 0: rows[i] = 0 cols[j] =", "= 0 for j in range(y): if cols[j] == 0: for i in", "if cols[j] == 0: for i in range(x): matrix[i][j] = 0 m =", "> 0: y = len(matrix[0]) else: y = 0 rows = [1]*x cols", "len(matrix[0]) else: y = 0 rows = [1]*x cols = [1]*y for i", "= 0 rows = [1]*x cols = [1]*y for i in range(x): for", "i in range(x): for j in range(y): if matrix[i][j] == 0: rows[i] =", "in range(y): if cols[j] == 0: for i in range(x): matrix[i][j] = 0", "rows[i] == 0: for j in range(y): matrix[i][j] = 0 for j in", "cols = [1]*y for i in range(x): for j in range(y): if matrix[i][j]", "range(y): if matrix[i][j] == 0: rows[i] = 0 cols[j] = 0 for i", "in range(x): matrix[i][j] = 0 m = [[1,1,1],[0,1,2]] s = Solution() s.setZeroes(m) print(m)", "rows[i] = 0 cols[j] = 0 for i in range(x): if rows[i] ==", "RETURN NOTHING, MODIFY matrix IN 
PLACE. def setZeroes(self, matrix): x = len(matrix) if", "y = len(matrix[0]) else: y = 0 rows = [1]*x cols = [1]*y", "== 0: for i in range(x): matrix[i][j] = 0 m = [[1,1,1],[0,1,2]] s", "matrix, a list of lists of integers # RETURN NOTHING, MODIFY matrix IN", "of integers # RETURN NOTHING, MODIFY matrix IN PLACE. def setZeroes(self, matrix): x", "= len(matrix) if x > 0: y = len(matrix[0]) else: y = 0", "[1]*y for i in range(x): for j in range(y): if matrix[i][j] == 0:", "matrix[i][j] == 0: rows[i] = 0 cols[j] = 0 for i in range(x):", "= 0 cols[j] = 0 for i in range(x): if rows[i] == 0:", "matrix IN PLACE. def setZeroes(self, matrix): x = len(matrix) if x > 0:", "integers # RETURN NOTHING, MODIFY matrix IN PLACE. def setZeroes(self, matrix): x =", "PLACE. def setZeroes(self, matrix): x = len(matrix) if x > 0: y =", "j in range(y): if cols[j] == 0: for i in range(x): matrix[i][j] =", "len(matrix) if x > 0: y = len(matrix[0]) else: y = 0 rows", "range(y): matrix[i][j] = 0 for j in range(y): if cols[j] == 0: for", "@param matrix, a list of lists of integers # RETURN NOTHING, MODIFY matrix", "range(x): for j in range(y): if matrix[i][j] == 0: rows[i] = 0 cols[j]", "matrix): x = len(matrix) if x > 0: y = len(matrix[0]) else: y", "0: for i in range(x): matrix[i][j] = 0 m = [[1,1,1],[0,1,2]] s =", "rows = [1]*x cols = [1]*y for i in range(x): for j in", "of lists of integers # RETURN NOTHING, MODIFY matrix IN PLACE. 
def setZeroes(self,", "== 0: rows[i] = 0 cols[j] = 0 for i in range(x): if", "# @param matrix, a list of lists of integers # RETURN NOTHING, MODIFY", "x = len(matrix) if x > 0: y = len(matrix[0]) else: y =", "if rows[i] == 0: for j in range(y): matrix[i][j] = 0 for j", "for i in range(x): matrix[i][j] = 0 m = [[1,1,1],[0,1,2]] s = Solution()", "for j in range(y): matrix[i][j] = 0 for j in range(y): if cols[j]", "= [1]*x cols = [1]*y for i in range(x): for j in range(y):", "x > 0: y = len(matrix[0]) else: y = 0 rows = [1]*x", "= 0 for i in range(x): if rows[i] == 0: for j in", "y = 0 rows = [1]*x cols = [1]*y for i in range(x):", "i in range(x): matrix[i][j] = 0 m = [[1,1,1],[0,1,2]] s = Solution() s.setZeroes(m)", "0: rows[i] = 0 cols[j] = 0 for i in range(x): if rows[i]", "cols[j] = 0 for i in range(x): if rows[i] == 0: for j" ]
[ "inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\" Displays an embed", "double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\" Displays", "default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of", "date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs", "if a duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "how to use the user command. \"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information", "\"\"\" User embeds. \"\"\" async def create_user_embed(user, author, color): \"\"\" Creates an embed", "async def create_general_help_embed(author): \"\"\" Displays an embed with instructions on how to use", ") Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel", "user to a duel over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the", "by a user, grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems", "refers to problems that do not have a rating on Codeforces.\", color=0xFF0000, )", "inline=False, ) if \"city\" in user and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']},", "if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) 
Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays", "to use the plottags command. \"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems", "- Displays last n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\" async def create_user_embed(user, author, color):", "return Embed async def create_user_help_embed(author): \"\"\" Displays an embed with instructions on how", "description=\"Plots the problems done by a user, grouped by rating.\", color=0xFF0000, ) Embed.add_field(", "async def create_user_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed async", "solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds. \"\"\" async", "- Displays a random problem of that rating.\\n`-problem <list_of_tags>` - Displays a random", "\"\"\" Displays an embed with instructions on how to use the plotindex command.", "contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "\"\"\" Stalk embeds. 
\"\"\" async def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an", "title=\"plotrating\", description=\"Plots the problems done by a user, grouped by rating.\", color=0xFF0000, )", "index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", ") else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False,", "rating.\\n`-problem <list_of_tags>` - Displays a random problem of those tags (multiple tags are", "!= \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\"", "\"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) #", "to use the stalk command. \"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last", "def create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "use the plottags command. \"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems done", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\" Displays an embed with", "Embed \"\"\" Upcoming contests embeds. 
\"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates an", "Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems done by a user, grouped by", "\"\"\" async def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an embed with information", "Creates an embed with user information. \"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color,", "%H:%M\") # Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600", "user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last n problems solved by a", "author): \"\"\" Creates an embed with the index plot of a user. \"\"\"", "by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "async def create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y, %H:%M\") #", "tags plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\")", "solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field(", "if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False)", "def create_stalk_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "the plotrating command. \"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems done by", "a user. 
\"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\"", "with information about a user's last n solved problems. \"\"\" Embed = discord.Embed(", "on how to use the user command. \"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays", "= discord.Embed( title=\"user\", description=\"Displays information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\",", "grouped by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url,", "mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\" async def", "url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags in spoilers if", "\"\"\" Problem embeds. 
\"\"\" async def create_problem_embed(problem, author): \"\"\" Creates an embed with", "create_rating_plot_embed(handle, author): \"\"\" Creates an embed with the rating plot of a user.", "a user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "problem of those tags and rating (order does not matter).\\n\\nNote: For tags like", "\"\"\" Displays an embed with instructions on how to use the duel command.", "\"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if", "a duel and decide the result (only if a duel is in progress).\",", ") if \"city\" in user and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\",", "solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds.", ") Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags in spoilers if problem[\"tags\"] !=", "def create_index_plot_embed(handle, author): \"\"\" Creates an embed with the index plot of a", "value=\"Displays the list of upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another", "async def create_contest_embed(contestList, author): \"\"\" Creates an embed with contest information. \"\"\" Embed", "over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by a", "to use the duel command. \"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another user", "of those tags and rating (order does not matter).\\n\\nNote: For tags like \"binary", "functions related to Discord-specific features, such as embeds. 
\"\"\" import discord import datetime", "# Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes", "= discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle,", "Displays an embed with instructions on how to use the upcoming command. \"\"\"", "the duel command. \"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another user to a", "async def create_rating_plot_embed(handle, author): \"\"\" Creates an embed with the rating plot of", "in user and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if", "async def create_duel_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "= discord.Embed( title=\"plotrating\", description=\"Plots the problems done by a user, grouped by rating.\",", "name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field(", "def create_user_embed(user, author, color): \"\"\" Creates an embed with user information. \"\"\" Embed", "create_problem_embed(problem, author): \"\"\" Creates an embed with problem information. \"\"\" Embed = discord.Embed(", "value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds. \"\"\" async", "\"\"\" Displays an embed with information about the duel. \"\"\" Embed = discord.Embed(", "an embed with information about all ongoing duels. \"\"\" Embed = discord.Embed( title=\"Ongoing", "Creates an embed with the tags plot of a user. 
\"\"\" Embed =", "and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem <rating>`", "if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name}", "problem of that rating.\\n`-problem <list_of_tags>` - Displays a random problem of those tags", "author): \"\"\" Creates an embed with contest information. \"\"\" Embed = discord.Embed(title=\"List of", "async def create_problem_embed(problem, author): \"\"\" Creates an embed with problem information. \"\"\" Embed", "(only if a duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "author): \"\"\" Creates an embed with problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}.", "plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems done by a", "\"\"\" Displays an embed with instructions on how to use the stalk command.", "{minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\" async", "Embed = discord.Embed( title=\"duel\", description=\"Challenges another user to a duel over a problem.\",", "how to use the plottags command. 
\"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the", "inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\" Displays an embed", ") Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by a user, grouped by rating.\",", "Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>` - Displays last n", "in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in user: Embed.add_field(", "<optional_rating> <optional_tags>` - To challenge a user\\n`-endduel` - To end a duel and", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author): \"\"\" Displays an embed with instructions", "discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding each contest as a field to", "duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes = (duration.seconds // 60)", "Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle, author): \"\"\" Creates an embed", "embed with instructions on how to use the plotindex command. \"\"\" Embed =", "\"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed with information about", "return Embed async def create_duel_help_embed(author): \"\"\" Displays an embed with instructions on how", "= discord.Embed( title=\"plottags\", description=\"Plots the problems done by a user, grouped by tags.\",", "// 3600 minutes = (duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} -", "\"\"\" Creates an embed with the index plot of a user. \"\"\" Embed", "\"\"\" Displays an embed with information about all ongoing duels. 
\"\"\" Embed =", "inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user to a duel over a problem.\",", "problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs", "description=\"Displays a random problem of optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\",", "to learn about a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about", "name=\"user\", value=\"Displays information about a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last", "duel and decide the result (only if a duel is in progress).\", inline=False,", "60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs, {minutes}", "about all ongoing duels. \"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) #", "problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates", "return Embed async def create_plottags_help_embed(author): \"\"\" Displays an embed with instructions on how", "\"\"\" Creates an embed with information about a user's last n solved problems.", "field to the embed for contest in contestList: # Obtaining the start time", "information about all ongoing duels. 
\"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, )", "of upcoming contests\", color=0xFF0000) # Adding each contest as a field to the", "import time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import", "discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields to embed for duel in", "to use the problem command. \"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a random", "of the user\\n`-stalk <codeforces_handle> <n>` - Displays last n submissions of the user\",", "contest information. \"\"\" Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding each", "in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field(", "Creates an embed with the rating plot of a user. \"\"\" Embed =", "duel over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` -", "and rating (order does not matter).\\n\\nNote: For tags like \"binary search\", enclose the", "Embed async def create_plottags_help_embed(author): \"\"\" Displays an embed with instructions on how to", "Embed.add_field( name=\"stalk\", value=\"Displays the last n problems solved by a user.\", inline=False, )", "information about a user's last n solved problems. 
\"\"\" Embed = discord.Embed( title=f\"Last", "a user, grouped by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems", "\"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False,", "= discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields to embed for duel", "= discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to learn about a specific command.\",", ") Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags in spoilers if problem[\"tags\"] !=", "rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by a user, grouped", ") Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "name=\"upcoming\", value=\"Displays the list of upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges", "description=\"Displays information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\" Displays", "{user['country']}\", inline=False, ) if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else:", "- To end a duel and decide the result (only if a duel", "\"\"\" Upcoming contests embeds. \"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates an embed", "how to use the problem command. 
\"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a", "with instructions on how to use all commands. \"\"\" Embed = discord.Embed( title=\"Help", "user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the", "a random problem of that rating.\\n`-problem <list_of_tags>` - Displays a random problem of", "\"\"\" Creates an embed with problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\",", "a user\\n`-endduel` - To end a duel and decide the result (only if", "\"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems done by a user, grouped", "Creates an embed with information about a user's last n solved problems. \"\"\"", "with information about all ongoing duels. \"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000,", "= discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds.", "use all commands. \"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to", "random problem of optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` -", "plottags command. \"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems done by a", "time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y, %H:%M\")", "embed with the rating plot of a user. \"\"\" Embed = discord.Embed( title=f\"{handle}'s", "a user, grouped by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False", "how to use the stalk command. 
\"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the", "Creates an embed with problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\",", "= date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours", "contest in contestList: # Obtaining the start time of the contest date =", "use the user command. \"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information about a", ") Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "a random problem.\\n`-problem <rating>` - Displays a random problem of that rating.\\n`-problem <list_of_tags>`", "color): \"\"\" Creates an embed with user information. \"\"\" Embed = discord.Embed( title=user[\"handle\"],", "the problems done by a user, grouped by contest index.\", color=0xFF0000, ) Embed.add_field(", ") # Adding fields to embed for duel in duels: date = datetime.datetime.strptime(", "description=\"Challenges another user to a duel over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\",", "embeds. \"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates an embed with the rating", ") Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge a user\\n`-endduel` -", "inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\" Displays an embed", "Displays an embed with instructions on how to use the plotrating command. 
\"\"\"", "in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\"", "user\\n`-endduel` - To end a duel and decide the result (only if a", "command. \"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems done by a user,", "n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author):", "Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user to a duel over", "command. \"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000, )", "value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\" Displays", "value=\"Plots the problems done by a user, grouped by contest index.\", inline=False, )", "with the rating plot of a user. \"\"\" Embed = discord.Embed( title=f\"{handle}'s solved", "rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem", "discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? refers to problems that do not have", "all ongoing duels. 
\"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding", "minutes = (duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]}", "value=\"Displays the last n problems solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays", "(order does not matter).\\n\\nNote: For tags like \"binary search\", enclose the tag in", "by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds. \"\"\"", "embed with information about a user's last n solved problems. \"\"\" Embed =", "\"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates an embed with contest information. \"\"\"", "to problems that do not have a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\")", "done by a user, grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\",", "inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\" async def create_submissions_embed(submissions,", "Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to learn about a specific", "problems solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False)", "tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random problem of", "how to use the upcoming command. \"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information", "user\\n`-stalk <codeforces_handle> <n>` - Displays last n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url,", "? 
refers to problems that do not have a rating on Codeforces.\", color=0xFF0000,", "enclose the tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds. \"\"\" async def", "= discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False)", "{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing", "for contest in contestList: # Obtaining the start time of the contest date", "instructions on how to use the plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\",", "Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds.", "description=\"Plots the problems done by a user, grouped by contest index.\", color=0xFF0000, )", "value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, )", "index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems done by a user, grouped", "with instructions on how to use the stalk command. 
\"\"\" Embed = discord.Embed(", "problem of optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays", "decide the result (only if a duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url,", "Embed async def create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions on how to", "%d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time: {date} {time.tzname[0]}\", inline=False,", "\"\"\" async def create_problem_embed(problem, author): \"\"\" Creates an embed with problem information. \"\"\"", "Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags in spoilers if problem[\"tags\"] != \"[]\":", "embed with the index plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved", "color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\"", "color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates an", "Embed async def create_duels_embed(duels): \"\"\" Displays an embed with information about all ongoing", "does not matter).\\n\\nNote: For tags like \"binary search\", enclose the tag in double", "and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in", "return Embed \"\"\" Problem embeds. 
\"\"\" async def create_problem_embed(problem, author): \"\"\" Creates an", "tags and rating (order does not matter).\\n\\nNote: For tags like \"binary search\", enclose", "\"\"\" import discord import datetime import time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL,", "{handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds. \"\"\" async", "hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\"", "Displays last n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "{time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\"", "{dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "= discord.Embed( title=\"duel\", description=\"Challenges another user to a duel over a problem.\", color=0xFF0000,", "value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds. \"\"\" async def", "color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "@<discord_user> <optional_rating> <optional_tags>` - To challenge a user\\n`-endduel` - To end a duel", "problem.\\n`-problem <rating>` - Displays a random problem of that rating.\\n`-problem <list_of_tags>` - Displays", "with instructions on how to use the plotrating command. \"\"\" Embed = discord.Embed(", "use the upcoming command. 
\"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming", "a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "on how to use all commands. \"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type", "def create_plottags_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "with instructions on how to use the user command. \"\"\" Embed = discord.Embed(", "return Embed async def create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions on how", "on how to use the plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots", "tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, )", "user command. 
\"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information about a user.\", color=0xFF0000", "= discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding each contest as a field", "\"\"\" async def create_user_embed(user, author, color): \"\"\" Creates an embed with user information.", "<codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\" Displays an embed", "the problems done by a user, grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\",", "title=\"stalk\", description=f\"Displays the last n problems solved by a user ({NUMBER_OF_ACS} by default).\",", "% 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\",", "upcoming contests\", color=0xFF0000) # Adding each contest as a field to the embed", "by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "= duration.seconds // 3600 minutes = (duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"],", "about the duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts", "create_user_embed(user, author, color): \"\"\" Creates an embed with user information. \"\"\" Embed =", "grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by a", "discord import datetime import time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, )", "title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields to embed for duel in duels:", "how to use the plotrating command. 
\"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the", "PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\" async def create_user_embed(user,", "return Embed \"\"\" Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays an embed", "return Embed async def create_duels_embed(duels): \"\"\" Displays an embed with information about all", ") Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']}", "tags in spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags)", "with the index plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\",", "user to a duel over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user>", "value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\" Displays an embed", "inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last n problems solved by a user.\",", "discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle, author):", "\"\"\" Duel embeds. 
\"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed", "return Embed async def create_problem_help_embed(author): \"\"\" Displays an embed with instructions on how", "url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\" in user: Embed.add_field(", "discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\" in", "with instructions on how to use the plotindex command. \"\"\" Embed = discord.Embed(", "duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes = (duration.seconds //", "Embed = discord.Embed( title=\"problem\", description=\"Displays a random problem of optional rating and/or tags.\",", "matter).\\n\\nNote: For tags like \"binary search\", enclose the tag in double quotes.', inline=False,", "a duel over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>`", "vs {opponent.display_name}\", inline=False, ) return Embed async def create_duels_embed(duels): \"\"\" Displays an embed", "def create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "to use the plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems", "inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds. \"\"\" async def create_problem_embed(problem,", "inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming Codeforces contests.\", inline=False, ) Embed.add_field(", "Adding each contest as a field to the embed for contest in contestList:", "embed with instructions on how to use the plotrating command. 
\"\"\" Embed =", "solved problems\", description=\"Note: ? refers to problems that do not have a rating", "Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by a user, grouped by contest index.\",", "inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\" Displays an embed", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds. \"\"\" async def create_duel_begin_embed(problem, author,", "tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests", "to embed for duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b", "\"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\"", "information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions on", "= discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\"", "last n problems solved by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field(", "create_general_help_embed(author): \"\"\" Displays an embed with instructions on how to use all commands.", "Printing the tags in spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"])", "inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by a 
user, grouped by", "solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle, author): \"\"\"", "the duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\",", "duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart", "Displays an embed with instructions on how to use the duel command. \"\"\"", "all commands. \"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to learn", "create_problem_help_embed(author): \"\"\" Displays an embed with instructions on how to use the problem", "command. \"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last n problems solved by", "tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author):", "value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\" Displays an", "name=\"plotrating\", value=\"Plots the problems done by a user, grouped by rating.\", inline=False, )", "problem command. 
\"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a random problem of optional", "Embed async def create_index_plot_embed(handle, author): \"\"\" Creates an embed with the index plot", "end a duel and decide the result (only if a duel is in", "last n problems solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random", "contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining contest", ") Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle, author): \"\"\" Creates an", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\" Displays an embed with", "name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed async def create_duels_embed(duels): \"\"\" Displays", "create_plottags_help_embed(author): \"\"\" Displays an embed with instructions on how to use the plottags", "an embed with instructions on how to use all commands. \"\"\" Embed =", "an embed with information about a user's last n solved problems. \"\"\" Embed", "Stalk embeds. \"\"\" async def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an embed", "- Displays a random problem of those tags and rating (order does not", "on how to use the problem command. \"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays", "value=\"Displays information about a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last n", "color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds. 
\"\"\" async def create_general_help_embed(author):", "Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\" Displays", "user and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\"", "on how to use the plottags command. \"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots", "text=str(author)) return Embed \"\"\" Problem embeds. \"\"\" async def create_problem_embed(problem, author): \"\"\" Creates", "on how to use the stalk command. \"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays", "value=\"Plots the problems done by a user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url,", "a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge", "import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\" async def create_user_embed(user, author, color): \"\"\" Creates", "Embed async def create_stalk_help_embed(author): \"\"\" Displays an embed with instructions on how to", "description=\"Displays information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "n problems solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\",", "solved problems. \"\"\" Embed = discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000,", "Displays an embed with instructions on how to use the user command. 
\"\"\"", "= discord.Embed( title=\"stalk\", description=f\"Displays the last n problems solved by a user ({NUMBER_OF_ACS}", "value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed async def create_duels_embed(duels):", "async def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an embed with information about", "def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an embed with information about a", "tags like \"binary search\", enclose the tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url,", "problems done by a user, grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating", "a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays", "= datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes = (duration.seconds // 60) %", "Embed = discord.Embed( title=\"user\", description=\"Displays information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds. 
\"\"\" async def create_problem_embed(problem, author):", "start time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y,", "create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions on how to use the upcoming", "async def create_problem_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "count, handle, author): \"\"\" Creates an embed with information about a user's last", "<codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\" Displays an", "author): \"\"\" Creates an embed with the rating plot of a user. \"\"\"", "duels. \"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields to", ").strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time: {date} {time.tzname[0]}\",", "NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\" async", "author): \"\"\" Creates an embed with information about a user's last n solved", "quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\" Displays an", "ongoing duels. \"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields", "problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge a", "Discord-specific features, such as embeds. 
\"\"\" import discord import datetime import time from", "import discord import datetime import time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL,", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates an embed with", "user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\" Displays an embed", "\"firstName\" in user and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, )", "with instructions on how to use the upcoming command. \"\"\" Embed = discord.Embed(", "color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags in spoilers if problem[\"tags\"]", "the user command. \"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information about a user.\",", "grouped by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems done by", "\"\"\" Creates an embed with the rating plot of a user. \"\"\" Embed", "Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\",", "\"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a random problem of optional rating and/or", "the tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "embeds. \"\"\" async def create_user_embed(user, author, color): \"\"\" Creates an embed with user", "Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? refers to problems that do", "\"\"\" Displays an embed with instructions on how to use the upcoming command.", "Graph embeds. 
\"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates an embed with the", "to a duel over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating>", "Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle>", "title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) #", "the tags in spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\",", "\"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming", "async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed with information about the", "as a field to the embed for contest in contestList: # Obtaining the", ") if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\",", "another user to a duel over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots", "a field to the embed for contest in contestList: # Obtaining the start", "\"\"\" Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays an embed with instructions", "solved by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>`", "inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds. 
\"\"\" async def create_duel_begin_embed(problem,", "Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help", "= discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? refers to problems that do not", "user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False)", "color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge a user\\n`-endduel`", "on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle, author):", "description=\"Note: ? refers to problems that do not have a rating on Codeforces.\",", "user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in user and \"country\"", "discord.Embed( title=\"problem\", description=\"Displays a random problem of optional rating and/or tags.\", color=0xFF0000, )", "create_duel_help_embed(author): \"\"\" Displays an embed with instructions on how to use the duel", "Embed.add_field( name=\"plottags\", value=\"Plots the problems done by a user, grouped by tags.\", inline=False,", "to use the upcoming command. 
\"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about", "by a user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming Codeforces contests.\",", "Displays an embed with instructions on how to use all commands. \"\"\" Embed", "description=\"Plots the problems done by a user, grouped by tags.\", color=0xFF0000, ) Embed.add_field(", "by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems done by a", "discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "problems done by a user, grouped by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\",", "color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem <rating>` - Displays", "Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge a user\\n`-endduel` - To", "a user's last n solved problems. \"\"\" Embed = discord.Embed( title=f\"Last {count} solved", "\"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\",", "how to use the plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the", "\"\"\" Creates an embed with contest information. \"\"\" Embed = discord.Embed(title=\"List of upcoming", "on how to use the duel command. 
\"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle, author): \"\"\" Creates an embed with", "problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming Codeforces contests.\", inline=False, )", "import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds.", "async def create_duels_embed(duels): \"\"\" Displays an embed with information about all ongoing duels.", "= (duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} -", "author): \"\"\" Creates an embed with the tags plot of a user. \"\"\"", "discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url,", "Embed async def create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions on how to", "a user. \"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? refers to", "name=\"stalk\", value=\"Displays the last n problems solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\",", "the rating plot of a user. 
\"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\",", "Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url,", "\"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time:", "description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags", "description=f\"Displays the last n problems solved by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000,", "(duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours}", "the tags plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000)", "Displays a random problem.\\n`-problem <rating>` - Displays a random problem of that rating.\\n`-problem", "an embed with the tags plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s", "inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by a user, grouped by", "an embed with instructions on how to use the stalk command. \"\"\" Embed", "Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed async def create_duels_embed(duels): \"\"\"", "= datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\",", "on how to use the plotrating command. \"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots", ") from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. 
\"\"\" async def create_user_embed(user, author,", "problems that do not have a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url,", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\" Displays an embed with instructions", "!= \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\",", "# Adding each contest as a field to the embed for contest in", "over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To", "return Embed \"\"\" Graph embeds. \"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates an", "grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "commands. 
\"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to learn about", "Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and", "fields to embed for duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\"", "3600 minutes = (duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString}", "%H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time: {date} {time.tzname[0]}\", inline=False, ) return", "user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author):", "learn about a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about a", "to the embed for contest in contestList: # Obtaining the start time of", "color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle, author): \"\"\" Creates", "embed for contest in contestList: # Obtaining the start time of the contest", "\"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems done by a user, grouped", "instructions on how to use the stalk command. \"\"\" Embed = discord.Embed( title=\"stalk\",", ") Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk", "discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. 
{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags", "command. \"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a random problem of optional rating", "Adding fields to embed for duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d", "contest as a field to the embed for contest in contestList: # Obtaining", "Displays a random problem of those tags and rating (order does not matter).\\n\\nNote:", "with user information. \"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if", "( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\"", "discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds. \"\"\"", "color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of the", "in user and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if", "user, grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url,", "%Y, %H:%M\") # Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds //", "duels\", color=0xFF0000, ) # Adding fields to embed for duel in duels: date", "embeds. 
\"\"\" import discord import datetime import time from botforces.utils.constants import ( NUMBER_OF_ACS,", "Embed.add_field( name=\"user\", value=\"Displays information about a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the", "upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "to Discord-specific features, such as embeds. \"\"\" import discord import datetime import time", "Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author):", "Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes =", "discord.Embed( title=\"plotrating\", description=\"Plots the problems done by a user, grouped by rating.\", color=0xFF0000,", "of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\" Displays", "Displays a random problem of those tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>`", "rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle,", "as embeds. \"\"\" import discord import datetime import time from botforces.utils.constants import (", "with instructions on how to use the plottags command. \"\"\" Embed = discord.Embed(", "= discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "instructions on how to use the problem command. 
\"\"\" Embed = discord.Embed( title=\"problem\",", "Embed \"\"\" Duel embeds. \"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an", "def create_rating_plot_embed(handle, author): \"\"\" Creates an embed with the rating plot of a", "inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author): \"\"\" Displays an embed", "value=problem[4], inline=False) # Printing the tags in spoilers if problem[\"tags\"] != \"[]\": tags", "text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\" async def create_submissions_embed(submissions, count, handle, author):", "\"\"\" Graph embeds. \"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates an embed with", ") Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by a user, grouped by contest", "to use the user command. \"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information about", "the stalk command. \"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last n problems", "<codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>` - Displays", "def create_duel_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "with instructions on how to use the duel command. \"\"\" Embed = discord.Embed(", "done by a user, grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\",", "a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about a user.\", inline=False", "create_duels_embed(duels): \"\"\" Displays an embed with information about all ongoing duels. 
\"\"\" Embed", "color=0xFF0000, ) # Adding fields to embed for duel in duels: date =", "`-help command` to learn about a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays", "problems solved by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk", "user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in user: Embed.add_field( name=\"Rank\",", ") Embed.add_field( name=\"stalk\", value=\"Displays the last n problems solved by a user.\", inline=False,", "\"\"\" Displays an embed with instructions on how to use the problem command.", "Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\" Displays an", "\"\"\" Contains functions related to Discord-specific features, such as embeds. \"\"\" import discord", "instructions on how to use the plotrating command. \"\"\" Embed = discord.Embed( title=\"plotrating\",", "value=problem[\"rating\"], inline=False) # Printing the tags in spoilers if problem[\"tags\"] != \"[]\": tags", "tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author): \"\"\" Displays an", "(multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random problem of those", "name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds. \"\"\"", "instructions on how to use the duel command. \"\"\" Embed = discord.Embed( title=\"duel\",", "\"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in user:", "the plottags command. 
\"\"\" Embed = discord.Embed( title=\"plottags\", description=\"Plots the problems done by", "contestList: # Obtaining the start time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString", "about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem <rating>` -", "duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000,", "discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to learn about a specific command.\", color=0xFF0000,", "text=str(author)) return Embed \"\"\" Duel embeds. \"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\"", "embed with the tags plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions", "Embed async def create_user_help_embed(author): \"\"\" Displays an embed with instructions on how to", "enclose_tags_in_spoilers \"\"\" User embeds. 
\"\"\" async def create_user_embed(user, author, color): \"\"\" Creates an", "the problems done by a user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "async def create_tags_plot_embed(handle, author): \"\"\" Creates an embed with the tags plot of", "name=\"plottags\", value=\"Plots the problems done by a user, grouped by tags.\", inline=False, )", "of that rating.\\n`-problem <list_of_tags>` - Displays a random problem of those tags (multiple", "by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` -", "value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests embeds. \"\"\" async def create_contest_embed(contestList,", "= await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return", "def create_duels_embed(duels): \"\"\" Displays an embed with information about all ongoing duels. \"\"\"", "# Adding fields to embed for duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"],", "the last n problems solved by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, )", "a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "each contest as a field to the embed for contest in contestList: #", "Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays an embed with instructions on", "name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in user and \"country\" in user:", "embed with information about the duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. 
{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\",", "text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\" Displays an embed with instructions on", "the embed for contest in contestList: # Obtaining the start time of the", "Upcoming contests embeds. \"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates an embed with", "the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining", "%d, %Y, %H:%M\") # Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds", "Obtaining the start time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\" async def create_submissions_embed(submissions, count, handle,", "def create_tags_plot_embed(handle, author): \"\"\" Creates an embed with the tags plot of a", "create_tags_plot_embed(handle, author): \"\"\" Creates an embed with the tags plot of a user.", "inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list", "Embed async def create_duel_help_embed(author): \"\"\" Displays an embed with instructions on how to", "with instructions on how to use the problem command. 
\"\"\" Embed = discord.Embed(", "value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming Codeforces", ") Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of", "<list_of_tags>` - Displays a random problem of those tags (multiple tags are allowed).\\n`-problem", "{hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds.", "done by a user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems done by a user, grouped by", "in spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url,", "an embed with instructions on how to use the problem command. \"\"\" Embed", "user's last n solved problems. \"\"\" Embed = discord.Embed( title=f\"Last {count} solved by", "plot of a user. 
\"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ?", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\" Displays an embed with instructions", "return Embed async def create_index_plot_embed(handle, author): \"\"\" Creates an embed with the index", "date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining contest duration", "description=\"Type `-help command` to learn about a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\",", "the problems done by a user, grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\",", "the list of upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user", "the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\" Displays an", "by a user, grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False", "an embed with instructions on how to use the plotindex command. 
\"\"\" Embed", "return Embed async def create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions on how", "// 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs,", "else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, )", "discord.Embed( title=\"user\", description=\"Displays information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False)", "handle, author): \"\"\" Creates an embed with information about a user's last n", "text=str(author)) return Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates an embed with the", "of a user. \"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? refers", "title=\"problem\", description=\"Displays a random problem of optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field(", "async def create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "by a user, grouped by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\",", "contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author):", "\"city\" in user and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, )", "title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\" in user:", "with information about the duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. 
{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The", "embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays an embed with instructions on how", "{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags in spoilers", "duel command. \"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another user to a duel", "opponent): \"\"\" Displays an embed with information about the duel. \"\"\" Embed =", "create_contest_embed(contestList, author): \"\"\" Creates an embed with contest information. \"\"\" Embed = discord.Embed(title=\"List", "Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False, )", "Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\"", "embed with instructions on how to use the user command. \"\"\" Embed =", "the last n problems solved by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\" Displays an embed with instructions", "USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\" async def", "by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "text=str(author)) return Embed \"\"\" Upcoming contests embeds. 
\"\"\" async def create_contest_embed(contestList, author): \"\"\"", "Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\",", "# Printing the tags in spoilers if problem[\"tags\"] != \"[]\": tags = await", "Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming", "async def create_user_embed(user, author, color): \"\"\" Creates an embed with user information. \"\"\"", "User embeds. \"\"\" async def create_user_embed(user, author, color): \"\"\" Creates an embed with", ") Embed.add_field( name=\"plottags\", value=\"Plots the problems done by a user, grouped by tags.\",", "create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions on how to use the plotrating", "<optional_tags>` - To challenge a user\\n`-endduel` - To end a duel and decide", "create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an embed with information about a user's", "Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user:", "Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems done by a user, grouped by", "<codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\" Displays an", "Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. 
{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\",", "duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author):", "an embed with instructions on how to use the plotrating command. \"\"\" Embed", "random problem of those tags and rating (order does not matter).\\n\\nNote: For tags", "inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\" Displays an embed with", "grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "an embed with instructions on how to use the duel command. \"\"\" Embed", "information about a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last n problems", "last n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\",", "contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems done by a user,", "problem of those tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a", "a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by a user,", "and decide the result (only if a duel is in progress).\", inline=False, )", "Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last n problems solved by a user", "= datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b 
%d, %Y, %H:%M\") # Obtaining contest duration duration", "n solved problems. \"\"\" Embed = discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions,", "\"\"\" Displays an embed with instructions on how to use the plottags command.", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds. \"\"\" async def create_rating_plot_embed(handle, author):", "Menu\", description=\"Type `-help command` to learn about a specific command.\", color=0xFF0000, ) Embed.add_field(", "async def create_stalk_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "embed with problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, )", "# Obtaining the start time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString =", "those tags and rating (order does not matter).\\n\\nNote: For tags like \"binary search\",", "Displays an embed with instructions on how to use the plottags command. \"\"\"", "Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields to embed for", "hours = duration.seconds // 3600 minutes = (duration.seconds // 60) % 60 Embed.add_field(", "- {hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk", "- Displays a random problem.\\n`-problem <rating>` - Displays a random problem of that", "user. \"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? refers to problems", "<list_of_tags>` - Displays a random problem of those tags and rating (order does", "\"\"\" Creates an embed with the tags plot of a user. 
\"\"\" Embed", "inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"],", "discord.Embed( title=\"plottags\", description=\"Plots the problems done by a user, grouped by tags.\", color=0xFF0000,", "user, grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by", "embed with instructions on how to use the stalk command. \"\"\" Embed =", "title=\"Help Menu\", description=\"Type `-help command` to learn about a specific command.\", color=0xFF0000, )", "last n solved problems. \"\"\" Embed = discord.Embed( title=f\"Last {count} solved by {handle}\",", "have a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "of those tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random", "if \"firstName\" in user and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False,", "is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\"", "a duel over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems done", "color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds. \"\"\" async def create_rating_plot_embed(handle,", "problems. 
\"\"\" Embed = discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000, )", "random problem of that rating.\\n`-problem <list_of_tags>` - Displays a random problem of those", "submissions of the user\\n`-stalk <codeforces_handle> <n>` - Displays last n submissions of the", "create_user_help_embed(author): \"\"\" Displays an embed with instructions on how to use the user", "problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4],", "the problems done by a user, grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\",", "<rating> <list_of_tags>` - Displays a random problem of those tags and rating (order", "duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags in", "contests embeds. \"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates an embed with contest", "n problems solved by a user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\",", "text=str(author)) return Embed async def create_index_plot_embed(handle, author): \"\"\" Creates an embed with the", "\"\"\" Embed = discord.Embed( title=\"Ongoing duels\", color=0xFF0000, ) # Adding fields to embed", "\"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help command` to learn about a", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author): \"\"\" Displays an embed with", "in spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field(", "command. 
\"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another user to a duel over", "import datetime import time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from", "Embed async def create_problem_help_embed(author): \"\"\" Displays an embed with instructions on how to", "tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "information. \"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in", "Displays an embed with instructions on how to use the problem command. \"\"\"", "Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds. \"\"\"", "an embed with problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000,", "({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS}", "\"\"\" Creates an embed with user information. \"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\",", "in contestList: # Obtaining the start time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"])", "Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "- To challenge a user\\n`-endduel` - To end a duel and decide the", "use the plotrating command. 
\"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems done", "return Embed async def create_stalk_help_embed(author): \"\"\" Displays an embed with instructions on how", "title=f\"{handle}'s solved problems\", description=\"Note: ? refers to problems that do not have a", "by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author): \"\"\" Displays", "the plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems done by", "color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags in spoilers if problem[\"tags\"]", "an embed with information about the duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\",", "user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds.", "value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>` -", ") Embed.add_field( name=\"user\", value=\"Displays information about a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays", "optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random", "those tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random problem", "the start time of the contest date = datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d,", "embed with user information. 
\"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"])", "title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "async def create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions on how to use", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Stalk embeds. \"\"\" async def create_submissions_embed(submissions, count,", "Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates an embed", "progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\" Displays an", "embed with information about all ongoing duels. \"\"\" Embed = discord.Embed( title=\"Ongoing duels\",", "return Embed \"\"\" Upcoming contests embeds. \"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates", "Creates an embed with the index plot of a user. 
\"\"\" Embed =", "user ({NUMBER_OF_ACS} by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last", "Embed async def create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions on how to", "done by a user, grouped by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots", "\"\"\" Embed = discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url,", "<codeforces_handle> <n>` - Displays last n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "value='`-problem` - Displays a random problem.\\n`-problem <rating>` - Displays a random problem of", "rating (order does not matter).\\n\\nNote: For tags like \"binary search\", enclose the tag", "value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in user and \"country\" in user: Embed.add_field(", "do not have a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "title=\"user\", description=\"Displays information about a user.\", color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url,", "grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_user_help_embed(author): \"\"\"", "the index plot of a user. 
\"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000)", "datetime.datetime.fromtimestamp(contest[\"startTimeSeconds\"]) dateString = date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining contest duration duration =", "problems done by a user, grouped by tags.\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "= discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\" Displays an embed with instructions", "starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags in spoilers", "how to use the duel command. \"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another", "def create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "\"\"\" Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding each contest as", "user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in", "value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "title=\"plotindex\", description=\"Plots the problems done by a user, grouped by contest index.\", color=0xFF0000,", "await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed", "if 
\"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "instructions on how to use all commands. \"\"\" Embed = discord.Embed( title=\"Help Menu\",", "def create_problem_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "name=\"duel\", value=\"Challenges another user to a duel over a problem.\", inline=False, ) Embed.add_field(", "upcoming command. \"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000,", "specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about a user.\", inline=False )", "{NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>` - Displays last n submissions of", "user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about a user.\", inline=False ) Embed.add_field( name=\"stalk\",", "Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding each contest as a", "Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\", value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed async def", "contests\", color=0xFF0000) # Adding each contest as a field to the embed for", "\"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems done by a user, grouped", "text=str(author)) return Embed \"\"\" Graph embeds. 
\"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates", "result (only if a duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "Contains functions related to Discord-specific features, such as embeds. \"\"\" import discord import", "async def create_index_plot_embed(handle, author): \"\"\" Creates an embed with the index plot of", "user information. \"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\"", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds. \"\"\" async def create_problem_embed(problem, author): \"\"\"", "Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by a user, grouped by rating.\", inline=False,", "{count} solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\" Displays an embed with", "\"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\",", "def create_problem_embed(problem, author): \"\"\" Creates an embed with problem information. \"\"\" Embed =", "duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']}", "\"\"\" Displays an embed with instructions on how to use all commands. \"\"\"", "command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about a user.\", inline=False ) Embed.add_field(", "Displays an embed with information about all ongoing duels. 
\"\"\" Embed = discord.Embed(", "that do not have a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "information about the duel. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel", "To end a duel and decide the result (only if a duel is", "plotrating command. \"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems done by a", "text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions on", "contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user to a duel over a", "return Embed \"\"\" Stalk embeds. \"\"\" async def create_submissions_embed(submissions, count, handle, author): \"\"\"", "in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in user and", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests embeds. \"\"\" async def create_contest_embed(contestList, author):", "Embed = discord.Embed( title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "= discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"],", "name=\"plotindex\", value=\"Plots the problems done by a user, grouped by contest index.\", inline=False,", "embed with instructions on how to use all commands. 
\"\"\" Embed = discord.Embed(", "instructions on how to use the plottags command. \"\"\" Embed = discord.Embed( title=\"plottags\",", "embeds. \"\"\" async def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates an embed with", "Duel embeds. \"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed with", "use the problem command. \"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a random problem", "are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random problem of those tags and", "To challenge a user\\n`-endduel` - To end a duel and decide the result", "text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\" Displays an embed with instructions on", "embed for duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d,", "dateString = date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"])", "now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the tags in spoilers if", "value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge a user\\n`-endduel` - To end a", "stalk command. \"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last n problems solved", "the user\\n`-stalk <codeforces_handle> <n>` - Displays last n submissions of the user\", )", "embed with instructions on how to use the problem command. \"\"\" Embed =", "that rating.\\n`-problem <list_of_tags>` - Displays a random problem of those tags (multiple tags", "instructions on how to use the upcoming command. 
\"\"\" Embed = discord.Embed( title=\"upcoming\",", "challenge a user\\n`-endduel` - To end a duel and decide the result (only", "inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems done by a user, grouped by", "the result (only if a duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "create_index_plot_embed(handle, author): \"\"\" Creates an embed with the index plot of a user.", "title=f\"Last {count} solved by {handle}\", description=submissions, color=0xFF0000, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\"", "done by a user, grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the", "name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url,", "embeds. \"\"\" async def create_problem_embed(problem, author): \"\"\" Creates an embed with problem information.", "to use all commands. \"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help command`", "search\", enclose the tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "\"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note: ? 
refers to problems that", "problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by a user, grouped", "of upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user to a", "return Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates an embed with the tags", "<rating>` - Displays a random problem of that rating.\\n`-problem <list_of_tags>` - Displays a", "random problem.\", inline=False) Embed.add_field( name=\"upcoming\", value=\"Displays the list of upcoming Codeforces contests.\", inline=False,", "datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes = (duration.seconds // 60) % 60", "and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in", "a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last n problems solved by", "\"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates an embed with the rating plot", "a user, grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False )", "- Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>` - Displays last", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\" Displays an embed with", "color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\"", "a duel is in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def", "Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags in spoilers if problem[\"tags\"] != \"[]\":", "about a user's 
last n solved problems. \"\"\" Embed = discord.Embed( title=f\"Last {count}", "= await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests embeds.", "return Embed async def create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions on how", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\" Displays an embed with instructions", "Displays an embed with instructions on how to use the stalk command. \"\"\"", "submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\"", "a random problem of optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem`", "Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author):", "def create_contest_embed(contestList, author): \"\"\" Creates an embed with contest information. \"\"\" Embed =", "an embed with instructions on how to use the upcoming command. \"\"\" Embed", "inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\" Displays an embed with", "of optional rating and/or tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a", "use the plotindex command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems done", "Displays a random problem of that rating.\\n`-problem <list_of_tags>` - Displays a random problem", "\"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. 
{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, )", "problems done by a user, grouped by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\",", "by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems done by a user,", "\"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another user to a duel over a", "user, grouped by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False )", ") Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem <rating>` - Displays a", "<codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds. \"\"\" async def", "about a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information about a user.\",", "inline=False, ) return Embed async def create_duels_embed(duels): \"\"\" Displays an embed with information", "Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(),", "\"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user", "problems\", description=\"Note: ? refers to problems that do not have a rating on", "such as embeds. \"\"\" import discord import datetime import time from botforces.utils.constants import", "plot of a user. 
\"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url,", ") Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author): \"\"\"", "inline=False, ) if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\",", "create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions on how to use the plotindex", "def create_general_help_embed(author): \"\"\" Displays an embed with instructions on how to use all", "color=0xFF0000) # Adding each contest as a field to the embed for contest", "allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random problem of those tags and rating", "related to Discord-specific features, such as embeds. \"\"\" import discord import datetime import", "inline=False) # Printing the tags in spoilers if problem[\"tags\"] != \"[]\": tags =", "\"binary search\", enclose the tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "in progress).\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotrating_help_embed(author): \"\"\" Displays", "Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing", "60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False,", "embeds. 
\"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed with information", "name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>`", "problems done by a user, grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots", "%H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time: {date}", "features, such as embeds. \"\"\" import discord import datetime import time from botforces.utils.constants", "user and \"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\"", "command. \"\"\" Embed = discord.Embed( title=\"user\", description=\"Displays information about a user.\", color=0xFF0000 )", "Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests embeds. \"\"\" async def", "upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user to a duel", "= discord.Embed( title=\"problem\", description=\"Displays a random problem of optional rating and/or tags.\", color=0xFF0000,", "of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "to a duel over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems", "<n>` - Displays last n submissions of the user\", ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "Displays an embed with information about the duel. 
\"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}.", "by a user, grouped by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the", "not matter).\\n\\nNote: For tags like \"binary search\", enclose the tag in double quotes.',", "duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\")", "Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in user and \"country\" in", "\"\"\" async def create_general_help_embed(author): \"\"\" Displays an embed with instructions on how to", "spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.add_field( name=\"Duel\",", "\"\"\" Displays an embed with instructions on how to use the plotrating command.", "name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem <rating>` - Displays a random problem", "discord.Embed( title=\"duel\", description=\"Challenges another user to a duel over a problem.\", color=0xFF0000, )", "await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests embeds. \"\"\"", "user, grouped by contest index.\", inline=False, ) Embed.add_field( name=\"plottags\", value=\"Plots the problems done", "time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers", "embed with instructions on how to use the plottags command. \"\"\" Embed =", "Embed \"\"\" Graph embeds. 
\"\"\" async def create_rating_plot_embed(handle, author): \"\"\" Creates an embed", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\" Displays an embed with instructions", "with contest information. \"\"\" Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding", "problems done by a user, grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags", "duration.seconds // 3600 minutes = (duration.seconds // 60) % 60 Embed.add_field( name=contest[\"name\"], value=f\"{contest['id']}", "\"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last n problems solved by a", "Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_index_plot_embed(handle, author): \"\"\"", "color=color, ) Embed.set_thumbnail(url=user[\"avatar\"]) if \"firstName\" in user and \"lastName\" in user: Embed.add_field( name=\"Name\",", "information. \"\"\" Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) # Adding each contest", "how to use all commands. \"\"\" Embed = discord.Embed( title=\"Help Menu\", description=\"Type `-help", "title=\"duel\", description=\"Challenges another user to a duel over a problem.\", color=0xFF0000, ) Embed.add_field(", "name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plotindex_help_embed(author): \"\"\"", "text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\" Displays an embed with instructions on", "the problem command. \"\"\" Embed = discord.Embed( title=\"problem\", description=\"Displays a random problem of", "an embed with instructions on how to use the plottags command. 
\"\"\" Embed", "user, grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url,", "by a user, grouped by rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False", "name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\"", "{opponent.display_name}\", inline=False, ) return Embed async def create_duels_embed(duels): \"\"\" Displays an embed with", "Problem embeds. \"\"\" async def create_problem_embed(problem, author): \"\"\" Creates an embed with problem", "title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False) # Printing the tags in", "an embed with contest information. \"\"\" Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000)", "duel over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\", value=\"Plots the problems done by", "if \"city\" in user and \"country\" in user: Embed.add_field( name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False,", "value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\",", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Graph embeds. 
\"\"\" async def create_rating_plot_embed(handle, author): \"\"\"", "name=\"City\", value=f\"{user['city']}, {user['country']}\", inline=False, ) if \"rank\" in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False,", "by default).\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=f\"`-stalk <codeforces_handle>` - Displays last {NUMBER_OF_ACS} submissions", "- Displays a random problem of those tags (multiple tags are allowed).\\n`-problem <rating>", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays", "a user, grouped by tags.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plottags <codeforces_handle>`\", inline=False )", "def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed with information about the duel.", "text=str(author)) return Embed async def create_user_help_embed(author): \"\"\" Displays an embed with instructions on", "create_duel_begin_embed(problem, author, opponent): \"\"\" Displays an embed with information about the duel. \"\"\"", "botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services import enclose_tags_in_spoilers \"\"\" User", "not have a rating on Codeforces.\", color=0xFF0000, ) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", ") Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_duel_help_embed(author): \"\"\" Displays", "information about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "an embed with user information. 
\"\"\" Embed = discord.Embed( title=user[\"handle\"], url=f\"{USER_WEBSITE_URL}{user['handle']}\", color=color, )", "by a user.\", inline=False, ) Embed.add_field(name=\"problem\", value=\"Displays a random problem.\", inline=False) Embed.add_field( name=\"upcoming\",", "use the stalk command. \"\"\" Embed = discord.Embed( title=\"stalk\", description=f\"Displays the last n", "Embed \"\"\" Stalk embeds. \"\"\" async def create_submissions_embed(submissions, count, handle, author): \"\"\" Creates", "return Embed \"\"\" Duel embeds. \"\"\" async def create_duel_begin_embed(problem, author, opponent): \"\"\" Displays", "instructions on how to use the user command. \"\"\" Embed = discord.Embed( title=\"user\",", "def create_user_help_embed(author): \"\"\" Displays an embed with instructions on how to use the", "name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem embeds. \"\"\" async", ") Embed.add_field( name=\"duel\", value=\"Challenges another user to a duel over a problem.\", inline=False,", "a user, grouped by rating.\", inline=False, ) Embed.add_field( name=\"plotindex\", value=\"Plots the problems done", "For tags like \"binary search\", enclose the tag in double quotes.', inline=False, )", "problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Help embeds. \"\"\" async def", "embed with instructions on how to use the upcoming command. 
\"\"\" Embed =", "rating.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotrating <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed", "discord.Embed( title=\"plotindex\", description=\"Plots the problems done by a user, grouped by contest index.\",", "{user['lastName']}\", inline=False, ) if \"city\" in user and \"country\" in user: Embed.add_field( name=\"City\",", "tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays a random problem of those tags", "last {NUMBER_OF_ACS} submissions of the user\\n`-stalk <codeforces_handle> <n>` - Displays last n submissions", "Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Duel embeds. \"\"\" async def create_duel_begin_embed(problem, author, opponent):", "done by a user, grouped by contest index.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex", "inline=False) if \"rating\" in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "text=str(author)) return Embed \"\"\" Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays an", "Displays an embed with instructions on how to use the plotindex command. \"\"\"", "an embed with the index plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s", ") return Embed async def create_duels_embed(duels): \"\"\" Displays an embed with information about", "color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-plotindex <codeforces_handle>`\", inline=False ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "with the tags plot of a user. 
\"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\",", "url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", description=\"The duel starts now!\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[\"rating\"], inline=False) # Printing the", "command. \"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems done by a user,", "on how to use the upcoming command. \"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays", "discord.Embed( title=\"stalk\", description=f\"Displays the last n problems solved by a user ({NUMBER_OF_ACS} by", "enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Upcoming contests embeds. \"\"\" async", "contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours = duration.seconds // 3600 minutes = (duration.seconds", "text=str(author)) return Embed async def create_upcoming_help_embed(author): \"\"\" Displays an embed with instructions on", "Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time: {date} {time.tzname[0]}\", inline=False, ) return Embed", "\"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem:", "text=str(author)) return Embed async def create_problem_help_embed(author): \"\"\" Displays an embed with instructions on", "embed with contest information. 
\"\"\" Embed = discord.Embed(title=\"List of upcoming contests\", color=0xFF0000) #", "list of upcoming Codeforces contests.\", inline=False, ) Embed.add_field( name=\"duel\", value=\"Challenges another user to", "the problems done by a user, grouped by contest index.\", inline=False, ) Embed.add_field(", "another user to a duel over a problem.\", color=0xFF0000, ) Embed.add_field( name=\"Syntax\", value=\"`-duel", "spoilers if problem[\"tags\"] != \"[]\": tags = await enclose_tags_in_spoilers(problem[\"tags\"]) Embed.add_field(name=\"Tags\", value=tags) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "Embed.add_field( name=\"duel\", value=\"Challenges another user to a duel over a problem.\", inline=False, )", "botforces.utils.services import enclose_tags_in_spoilers \"\"\" User embeds. \"\"\" async def create_user_embed(user, author, color): \"\"\"", "datetime import time from botforces.utils.constants import ( NUMBER_OF_ACS, USER_WEBSITE_URL, PROBLEM_WEBSITE_URL, ) from botforces.utils.services", "- {dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return", "Embed \"\"\" Help embeds. \"\"\" async def create_general_help_embed(author): \"\"\" Displays an embed with", "date.strftime(\"%b %d, %Y, %H:%M\") # Obtaining contest duration duration = datetime.timedelta(seconds=contest[\"durationSeconds\"]) hours =", "command` to learn about a specific command.\", color=0xFF0000, ) Embed.add_field( name=\"user\", value=\"Displays information", "an embed with instructions on how to use the user command. \"\"\" Embed", "the upcoming command. \"\"\" Embed = discord.Embed( title=\"upcoming\", description=\"Displays information about upcoming contests.\",", "use the duel command. 
\"\"\" Embed = discord.Embed( title=\"duel\", description=\"Challenges another user to", ") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_plottags_help_embed(author): \"\"\" Displays an embed with", "with problem information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. {problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\",", "a random problem of those tags and rating (order does not matter).\\n\\nNote: For", "value=f\"{author.display_name} vs {opponent.display_name}\", inline=False, ) return Embed async def create_duels_embed(duels): \"\"\" Displays an", "random problem.\\n`-problem <rating>` - Displays a random problem of that rating.\\n`-problem <list_of_tags>` -", "to use the plotrating command. \"\"\" Embed = discord.Embed( title=\"plotrating\", description=\"Plots the problems", "like \"binary search\", enclose the tag in double quotes.', inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "%Y %H:%M:%S\") Embed.add_field( name=f\"{duel['handle_1']} vs {duel['handle_2']}\", value=f\"Problem: {PROBLEM_WEBSITE_URL}{duel['contestId']}/{duel['contestIndex']}\\nStart Time: {date} {time.tzname[0]}\", inline=False, )", "author, color): \"\"\" Creates an embed with user information. \"\"\" Embed = discord.Embed(", "Embed.add_field( name=\"Syntax\", value='`-problem` - Displays a random problem.\\n`-problem <rating>` - Displays a random", "async def create_plottags_help_embed(author): \"\"\" Displays an embed with instructions on how to use", "information. \"\"\" Embed = discord.Embed( title=f\"{problem['contestId']}{problem['contestIndex']}. 
{problem['name']}\", url=f\"{PROBLEM_WEBSITE_URL}{problem['contestId']}/{problem['contestIndex']}\", color=0xFF0000, ) Embed.add_field(name=\"Rating\", value=problem[4], inline=False)", "about a user.\", inline=False ) Embed.add_field( name=\"stalk\", value=\"Displays the last n problems solved", "name=\"Syntax\", value=\"`-duel @<discord_user> <optional_rating> <optional_tags>` - To challenge a user\\n`-endduel` - To end", "color=0xFF0000 ) Embed.add_field(name=\"Syntax\", value=\"`-user <codeforces_handle>`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async def create_stalk_help_embed(author):", "rating plot of a user. \"\"\" Embed = discord.Embed( title=f\"{handle}'s solved problems\", description=\"Note:", "= discord.Embed( title=\"plotindex\", description=\"Plots the problems done by a user, grouped by contest", "for duel in duels: date = datetime.datetime.strptime( duel[\"startTime\"], \"%Y-%m-%d %H:%M:%S.%f\" ).strftime(\"%b %d, %Y", "index plot of a user. \"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\")", "\"\"\" Embed = discord.Embed(title=f\"{handle}'s solved problems\", color=0xFF0000) Embed.set_image(url=\"attachment://figure.png\") Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\"", "value=f\"{contest['id']} - {dateString} {time.tzname[0]} - {hours} hrs, {minutes} mins\", inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author))", "value=\"Plots the problems done by a user, grouped by rating.\", inline=False, ) Embed.add_field(", "about upcoming contests.\", color=0xFF0000, ) Embed.add_field(name=\"Syntax\", value=\"`-upcoming`\", inline=False) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed async", "Embed async def create_tags_plot_embed(handle, author): \"\"\" Creates an embed with the tags plot", "an embed with the rating plot of a user. 
\"\"\" Embed = discord.Embed(", "embeds. \"\"\" async def create_contest_embed(contestList, author): \"\"\" Creates an embed with contest information.", "value=\"Challenges another user to a duel over a problem.\", inline=False, ) Embed.add_field( name=\"plotrating\",", "random problem of those tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` - Displays", "title=\"plottags\", description=\"Plots the problems done by a user, grouped by tags.\", color=0xFF0000, )", "author, opponent): \"\"\" Displays an embed with information about the duel. \"\"\" Embed", "Embed \"\"\" Problem embeds. \"\"\" async def create_problem_embed(problem, author): \"\"\" Creates an embed", "Creates an embed with contest information. \"\"\" Embed = discord.Embed(title=\"List of upcoming contests\",", "\"\"\" Displays an embed with instructions on how to use the user command.", "in user: Embed.add_field( name=\"Rating\", value=user[\"rating\"], inline=False, ) Embed.set_footer(icon_url=author.avatar_url, text=str(author)) return Embed \"\"\" Problem", "\"lastName\" in user: Embed.add_field( name=\"Name\", value=f\"{user['firstName']} {user['lastName']}\", inline=False, ) if \"city\" in user", "create_stalk_help_embed(author): \"\"\" Displays an embed with instructions on how to use the stalk", "in user: Embed.add_field( name=\"Rank\", value=user[\"rank\"].title(), inline=False, ) else: Embed.add_field(name=\"Rank\", value=\"Unranked\", inline=False) if \"rating\"", "command. \"\"\" Embed = discord.Embed( title=\"plotindex\", description=\"Plots the problems done by a user,", "a random problem of those tags (multiple tags are allowed).\\n`-problem <rating> <list_of_tags>` -", "embed with instructions on how to use the duel command. \"\"\" Embed =" ]
[ "True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0 class", "check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp", "io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\", drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_undriven_coreir.json\", \"gold/test_ignore_undriven_coreir.json\")", "= m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0", "inputs (useful for formal verification tools that use undriven inputs to mark wires", "pytest import magma as m from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit):", "m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True", "= True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True,", "= True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo", "drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards compatability test", "check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards compatability test with pytest.warns(DeprecationWarning): Bar", "Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), 
O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2,", "test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I", "class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class", "m from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io", "ability to ignore undriven inputs (useful for formal verification tools that use undriven", "\"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit))", "ignore undriven inputs (useful for formal verification tools that use undriven inputs to", "from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io =", "Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_", "any value) \"\"\" import pytest import magma as m from magma.testing import check_files_equal", "io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit),", "terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io", "compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io =", "verification tools that use undriven inputs to mark wires that can take on", "@= foo.O0 # 
partially undriven io.O2[0] @= 1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main,", "undriven inputs (useful for formal verification tools that use undriven inputs to mark", "# For backwards compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class", "io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit):", "I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0 @= io.I0 == io.I1", "\"\"\" import pytest import magma as m from magma.testing import check_files_equal def test_ignore_unused_undriven_basic():", "m.ClockIO() foo = Foo() foo.I0 @= io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\",", "undriven io.O2[0] @= 1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert", "the ability to ignore undriven inputs (useful for formal verification tools that use", "take on any value) \"\"\" import pytest import magma as m from magma.testing", "io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\", drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__,", "O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @= io.I0 io.O0 @= foo.O0 # partially", "+ m.ClockIO() foo = Foo() foo.I0 @= io.I0 == io.I1 io.O0 @= foo.O0", "Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main,", "= Foo() foo.I0 @= io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\",", "temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", 
\"gold/test_ignore_unused_undriven_basic.v\") def", "foo.I0 @= io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\", drive_undriven=True, terminate_unused=True)", "foo = Foo() foo.I0 @= io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main,", "test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit),", "= True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0", "m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @=", "inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards compatability", "check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit),", "def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io", "O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\")", "io.I0 io.O0 @= foo.O0 # partially undriven io.O2[0] @= 1 io.O3[0] @= 1", "on any value) \"\"\" import pytest import magma as m from magma.testing import", "O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0 @= io.I0 == io.I1 io.O0 @=", "foo.I0 @= io.I0 io.O0 @= foo.O0 # partially undriven io.O2[0] @= 1 io.O3[0]", "= m.IO(I0=m.In(m.Bit), 
I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ =", "Foo() foo.I0 @= io.I0 io.O0 @= foo.O0 # partially undriven io.O2[0] @= 1", "test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io +=", "True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True)", "= m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1", "io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo()", "O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit),", "# partially undriven io.O2[0] @= 1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True,", "m.Bit])) foo = Foo() foo.I0 @= io.I0 io.O0 @= foo.O0 # partially undriven", "magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit),", "\"\"\" Test the ability to ignore undriven inputs (useful for formal verification tools", "m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0 @= io.I0 ==", "value) \"\"\" import pytest import magma as m from magma.testing import check_files_equal def", "assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards compatability test with pytest.warns(DeprecationWarning):", "_ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I 
m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True,", "= m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_", "io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit),", "O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0 @= io.I0 == io.I1 io.O0", "== io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\", drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_undriven_coreir.json\",", "inputs to mark wires that can take on any value) \"\"\" import pytest", "foo.O0 # partially undriven io.O2[0] @= 1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True,", "\"I\", m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0", "mark wires that can take on any value) \"\"\" import pytest import magma", "O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @= io.I0 io.O0", "= m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__,", "backwards compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io", "(useful for formal verification tools that use undriven inputs to mark wires that", "io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0 @=", "terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards 
compatability test with", "m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1)", "foo = Foo() foo.I0 @= io.I0 io.O0 @= foo.O0 # partially undriven io.O2[0]", "O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @= io.I0 io.O0 @= foo.O0", "formal verification tools that use undriven inputs to mark wires that can take", "import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit))", "m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ =", "io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit),", "to ignore undriven inputs (useful for formal verification tools that use undriven inputs", "drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True", "io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def", "for formal verification tools that use undriven inputs to mark wires that can", "Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards", "m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]),", "class Main(m.Circuit): _ignore_undriven_ = True 
io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]),", "+= m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]),", "test_ignore_unused_undriven_hierarchy(): # For backwards compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit))", "Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1", "@= io.I0 io.O0 @= foo.O0 # partially undriven io.O2[0] @= 1 io.O3[0] @=", "m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For", "@= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir():", "O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ = True io", "class Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO()", "@= io.I0 class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit))", "Foo() foo.I0 @= io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\", drive_undriven=True,", "_ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit]))", "m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @= io.I0 io.O0 @= foo.O0 #", "= Foo() foo.I0 @= io.I0 
io.O0 @= foo.O0 # partially undriven io.O2[0] @=", "that can take on any value) \"\"\" import pytest import magma as m", "inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ =", "Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo", "that use undriven inputs to mark wires that can take on any value)", "True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0", "to mark wires that can take on any value) \"\"\" import pytest import", "@= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit),", "_ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo =", "io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_", "m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\",", "For backwards compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit):", "can take on any value) \"\"\" import pytest import magma as m from", "with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit),", "O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @= io.I0 io.O0 @=", "io.O2[0] @= 1 io.O3[0] @= 1 
m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__,", "io.I0 class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) +", "\"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit),", "I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io", "\"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards compatability test with pytest.warns(DeprecationWarning): Bar =", "1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\")", "class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\",", "import magma as m from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_", "assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit): _ignore_undriven_ = True io =", "io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp = ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert", "io.O0 @= foo.O0 # partially undriven io.O2[0] @= 1 io.O3[0] @= 1 
m.compile(\"build/test_ignore_unused_undriven_hierarchy\",", "use undriven inputs to mark wires that can take on any value) \"\"\"", "magma as m from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ =", "@= 1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\",", "True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo =", "I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @= io.I0", "~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): #", "O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io =", "def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I=m.In(m.Bit), O=m.Out(m.Bit)) temp =", "undriven inputs to mark wires that can take on any value) \"\"\" import", "io += m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ = True io =", "O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @= io.I0 class Main(m.Circuit): _ignore_undriven_ = True", "partially undriven io.O2[0] @= 1 io.O3[0] @= 1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True)", "Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit))", "import pytest import magma as m 
from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class", "as m from magma.testing import check_files_equal def test_ignore_unused_undriven_basic(): class Main(m.Circuit): _ignore_undriven_ = True", "@= io.I0 == io.I1 io.O0 @= foo.O0 m.compile(\"build/test_ignore_undriven_coreir\", Main, output=\"coreir\", drive_undriven=True, terminate_unused=True) assert", "_ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io += m.ClockIO() io.O1 @=", "m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit, m.Bit]), O3=m.Out(m.Array[2, m.Bit])) foo = Foo() foo.I0 @=", "tools that use undriven inputs to mark wires that can take on any", "= m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo() foo.I0 @= io.I0", "1 m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class", "\"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy(): # For backwards compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\",", "= ~io.I m.compile(\"build/test_ignore_unused_undriven_basic\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_basic.v\", \"gold/test_ignore_unused_undriven_basic.v\") def test_ignore_unused_undriven_hierarchy():", "Test the ability to ignore undriven inputs (useful for formal verification tools that", "= True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO() foo = Foo()", "pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\", m.In(m.Bit)) class Foo(m.Circuit): io = 
m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit),", "Foo(m.Circuit): io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) io.O1 @= io.I0 Bar()(io.I1) class Main(m.Circuit):", "class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bits[2]), I1=m.In(m.Bits[2]), O0=m.Out(m.Bit), O1=m.Out(m.Bit)) + m.ClockIO()", "wires that can take on any value) \"\"\" import pytest import magma as", "Bar()(io.I1) class Main(m.Circuit): _ignore_undriven_ = True io = m.IO(I0=m.In(m.Bit), I1=m.In(m.Bit), O0=m.Out(m.Bit), O1=m.Out(m.Bit), O2=m.Out(m.Tuple[m.Bit,", "m.compile(\"build/test_ignore_unused_undriven_hierarchy\", Main, inline=True, drive_undriven=True, terminate_unused=True) assert check_files_equal(__file__, \"build/test_ignore_unused_undriven_hierarchy.v\", \"gold/test_ignore_unused_undriven_hierarchy.v\") def test_ignore_undriven_coreir(): class Foo(m.Circuit):", "def test_ignore_unused_undriven_hierarchy(): # For backwards compatability test with pytest.warns(DeprecationWarning): Bar = m.DeclareCircuit(\"Bar\", \"I\"," ]
[ "headers)) # type: ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for", "import Optional, List, NamedTuple, Dict, Any import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\")", "data_str = \"\".join(c for c in data_str if c not in [\"(\", \";\",", ") class SNP: def __init__( self, rsid: str, table: Optional[list] = None, description:", "data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c in data_str", "\"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" ) class SNP: def", "not res.ok: raise Exception(f\"Received code: {res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table", "{} for row in html_rows: cols = row.find_all(\"td\") if not cols: continue row_data", "List, NamedTuple, Dict, Any import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE =", "((header, str) for header in headers)) # type: ignore html_rows = table.find_all(\"tr\") data:", "\")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup # type: ignore return data", "str) -> SNP: snp_kwargs: Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res =", "re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\"", "cols = row.find_all(\"td\") if not cols: continue row_data = [] for col in", "= tup # type: ignore return data def get_snp_details(rsid: str) -> SNP: snp_kwargs:", "Optional[str] = None ): self.rsid = rsid self.table = table self.description = description", "NamedTuple, Dict, Any import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = (", "self.description = description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers = table.find_all(\"th\") headers:", "= table.find_all(\"th\") headers: List[str] = [] for header in html_headers: h_str = 
header.string", "header in html_headers: h_str = header.string if not h_str: link = header.find(\"a\") h_str", "description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if __name__", "-> SNP: snp_kwargs: Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url)", "1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" ) class SNP: def __init__(", "data: Dict[str, DataTuple] = {} for row in html_rows: cols = row.find_all(\"td\") if", "col in cols: data_str = col.string if not data_str: link = col.find(\"a\") data_str", "[] for col in cols: data_str = col.string if not data_str: link =", "= link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c", "continue row_data = [] for col in cols: data_str = col.string if not", "rsid: str, table: Optional[list] = None, description: Optional[str] = None ): self.rsid =", "typing import Optional, List, NamedTuple, Dict, Any import requests import bs4 GENO_REGEX =", "= row.find_all(\"td\") if not cols: continue row_data = [] for col in cols:", "str, table: Optional[list] = None, description: Optional[str] = None ): self.rsid = rsid", "data_str if c not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno]", "rsid self.table = table self.description = description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]:", "def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers = table.find_all(\"th\") headers: List[str] = []", "description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"]", "# type: ignore return data def get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any]", "bs.find(\"table\", {\"style\": DESC_STYLE}) if 
description_table: description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string", "[\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup # type: ignore", "data_str = link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for", "for header in html_headers: h_str = header.string if not h_str: link = header.find(\"a\")", "cols: data_str = col.string if not data_str: link = col.find(\"a\") data_str = link.string", "( \"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" ) class SNP:", "None ): self.rsid = rsid self.table = table self.description = description def table_to_dict(table:", "requests.get(snp_url) if not res.ok: raise Exception(f\"Received code: {res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text,", "= NamedTuple(\"Row\", ((header, str) for header in headers)) # type: ignore html_rows =", "= [] for col in cols: data_str = col.string if not data_str: link", "c in data_str if c not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup =", "for c in data_str if c not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup", "data def get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any] = {} snp_url =", "= bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"] =", "# type: ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for row", "snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not res.ok: raise Exception(f\"Received code: {res.status_code}", "= table self.description = description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers =", "c not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup", "ignore return data def 
get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any] = {}", "= requests.get(snp_url) if not res.ok: raise Exception(f\"Received code: {res.status_code} from {snp_url}\") bs =", "if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html", "for row in html_rows: cols = row.find_all(\"td\") if not cols: continue row_data =", "table self.description = description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers = table.find_all(\"th\")", "table: Optional[list] = None, description: Optional[str] = None ): self.rsid = rsid self.table", "in headers)) # type: ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] = {}", "h_str = header.string if not h_str: link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower())", "None, description: Optional[str] = None ): self.rsid = rsid self.table = table self.description", "\"sortable smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if", "col.find(\"a\") data_str = link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c", "table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers = table.find_all(\"th\") headers: List[str] = [] for", "self.table = table self.description = description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers", "= table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html = description_table.find(\"td\") if", "row in html_rows: cols = row.find_all(\"td\") if not cols: continue row_data = []", "{res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable smwtable\"})", "{\"style\": DESC_STYLE}) if description_table: description_html = 
description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return", "= link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for header in headers)) #", "= description_html.string return SNP(rsid, **snp_kwargs) if __name__ == \"__main__\": snp = get_snp_details(\"rs28937869\") print(snp.table)", "table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for row in html_rows: cols = row.find_all(\"td\")", "from typing import Optional, List, NamedTuple, Dict, Any import requests import bs4 GENO_REGEX", "re from typing import Optional, List, NamedTuple, Dict, Any import requests import bs4", "= {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not res.ok: raise Exception(f\"Received", "from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if", "header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for header in", "row_data = [] for col in cols: data_str = col.string if not data_str:", "GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid;", "in [\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup # type:", "res.ok: raise Exception(f\"Received code: {res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table =", "import re from typing import Optional, List, NamedTuple, Dict, Any import requests import", "in data_str if c not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data)", "not cols: continue row_data = [] for col in cols: data_str = col.string", "Any]: html_headers = table.find_all(\"th\") headers: List[str] = [] for header in html_headers: h_str", "if not res.ok: raise Exception(f\"Received code: {res.status_code} from 
{snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\")", "def get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\"", "table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html =", "table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html = description_table.find(\"td\") if description_html:", "type: ignore return data def get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any] =", "tup # type: ignore return data def get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str,", "width:90%;\" ) class SNP: def __init__( self, rsid: str, table: Optional[list] = None,", "import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color:", "header.string if not h_str: link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple =", "= bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\",", "= data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c in data_str if", "DESC_STYLE}) if description_table: description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid,", "h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for header in headers))", "raise Exception(f\"Received code: {res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\",", "h_str: link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str)", "description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: 
html_headers = table.find_all(\"th\") headers: List[str] =", "if c not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] =", "DataTuple(*row_data) data[tup.geno] = tup # type: ignore return data def get_snp_details(rsid: str) ->", "{} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not res.ok: raise Exception(f\"Received code:", "description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if __name__ == \"__main__\":", "tup = DataTuple(*row_data) data[tup.geno] = tup # type: ignore return data def get_snp_details(rsid:", "snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html = description_table.find(\"td\")", "re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c in data_str if c not in", "cols: continue row_data = [] for col in cols: data_str = col.string if", "bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\":", "requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\"", "html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for row in html_rows: cols", "link = col.find(\"a\") data_str = link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str", "not h_str: link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header,", "in cols: data_str = col.string if not data_str: link = col.find(\"a\") data_str =", "data[tup.geno] = tup # type: ignore return data def get_snp_details(rsid: str) -> SNP:", "= [] for header in html_headers: h_str = header.string if not h_str: link", "for col in cols: data_str = col.string if not data_str: link = 
col.find(\"a\")", "bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\" + \"border-style:", "\"border-style: solid; margin:1em; width:90%;\" ) class SNP: def __init__( self, rsid: str, table:", "= header.string if not h_str: link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple", "headers: List[str] = [] for header in html_headers: h_str = header.string if not", "table.find_all(\"th\") headers: List[str] = [] for header in html_headers: h_str = header.string if", "code: {res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable", "+ \"border-style: solid; margin:1em; width:90%;\" ) class SNP: def __init__( self, rsid: str,", "solid; margin:1em; width:90%;\" ) class SNP: def __init__( self, rsid: str, table: Optional[list]", "if not h_str: link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\",", "data_str = col.string if not data_str: link = col.find(\"a\") data_str = link.string data_str", "= description def table_to_dict(table: bs4.element.Tag) -> Dict[str, Any]: html_headers = table.find_all(\"th\") headers: List[str]", "\"\".join(c for c in data_str if c not in [\"(\", \";\", \")\"]) row_data.append(data_str)", "-> Dict[str, Any]: html_headers = table.find_all(\"th\") headers: List[str] = [] for header in", "table = bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table =", "= ( \"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" ) class", "= col.string if not data_str: link = col.find(\"a\") data_str = link.string data_str =", "self, rsid: str, table: Optional[list] = None, description: Optional[str] = None ): self.rsid", "Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not 
res.ok: raise", "link = header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for", "= DataTuple(*row_data) data[tup.geno] = tup # type: ignore return data def get_snp_details(rsid: str)", "str) for header in headers)) # type: ignore html_rows = table.find_all(\"tr\") data: Dict[str,", "if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c in data_str if c not", "bs4.element.Tag) -> Dict[str, Any]: html_headers = table.find_all(\"th\") headers: List[str] = [] for header", "= re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em;", "in html_headers: h_str = header.string if not h_str: link = header.find(\"a\") h_str =", "\";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup # type: ignore return", "row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup # type: ignore return data def", "headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for header in headers)) # type: ignore", "= rsid self.table = table self.description = description def table_to_dict(table: bs4.element.Tag) -> Dict[str,", "return data def get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any] = {} snp_url", "snp_kwargs: Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not", "{snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if table:", "bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"]", "= description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if __name__ ==", "bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\": \"sortable 
smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table)", "type: ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for row in", "): self.rsid = rsid self.table = table self.description = description def table_to_dict(table: bs4.element.Tag)", "Dict[str, Any]: html_headers = table.find_all(\"th\") headers: List[str] = [] for header in html_headers:", "html_headers: h_str = header.string if not h_str: link = header.find(\"a\") h_str = link.string", "in html_rows: cols = row.find_all(\"td\") if not cols: continue row_data = [] for", "DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" )", "ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for row in html_rows:", "= bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table: description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] =", "html_rows: cols = row.find_all(\"td\") if not cols: continue row_data = [] for col", "Exception(f\"Received code: {res.status_code} from {snp_url}\") bs = bs4.BeautifulSoup(res.text, \"html.parser\") table = bs.find(\"table\", {\"class\":", "Dict, Any import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border:", "Optional, List, NamedTuple, Dict, Any import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE", "f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not res.ok: raise Exception(f\"Received code: {res.status_code} from {snp_url}\")", "{\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE})", "description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if __name__ == \"__main__\": snp =", "if not cols: continue row_data = [] for col in cols: data_str =", "\"html.parser\") table = bs.find(\"table\", 
{\"class\": \"sortable smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table", "#FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" ) class SNP: def __init__( self, rsid:", "not in [\"(\", \";\", \")\"]) row_data.append(data_str) tup = DataTuple(*row_data) data[tup.geno] = tup #", "Any import requests import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px;", "__init__( self, rsid: str, table: Optional[list] = None, description: Optional[str] = None ):", "Dict[str, DataTuple] = {} for row in html_rows: cols = row.find_all(\"td\") if not", "not data_str: link = col.find(\"a\") data_str = link.string data_str = data_str.strip() if re.match(GENO_REGEX,", "Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not res.ok:", "= table.find_all(\"tr\") data: Dict[str, DataTuple] = {} for row in html_rows: cols =", "= f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if not res.ok: raise Exception(f\"Received code: {res.status_code} from", "header in headers)) # type: ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple] =", "= header.find(\"a\") h_str = link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for header", "SNP: def __init__( self, rsid: str, table: Optional[list] = None, description: Optional[str] =", "html_headers = table.find_all(\"th\") headers: List[str] = [] for header in html_headers: h_str =", "= {} for row in html_rows: cols = row.find_all(\"td\") if not cols: continue", "data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c in data_str if c", "data_str: link = col.find(\"a\") data_str = link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str):", "margin:1em; width:90%;\" ) class SNP: def __init__( self, rsid: str, table: Optional[list] =", "NamedTuple(\"Row\", ((header, str) for header in headers)) # type: 
ignore html_rows = table.find_all(\"tr\")", "class SNP: def __init__( self, rsid: str, table: Optional[list] = None, description: Optional[str]", "= None ): self.rsid = rsid self.table = table self.description = description def", "get_snp_details(rsid: str) -> SNP: snp_kwargs: Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res", "background-color: #FFFFC0;\" + \"border-style: solid; margin:1em; width:90%;\" ) class SNP: def __init__( self,", "description: Optional[str] = None ): self.rsid = rsid self.table = table self.description =", "DataTuple] = {} for row in html_rows: cols = row.find_all(\"td\") if not cols:", "for header in headers)) # type: ignore html_rows = table.find_all(\"tr\") data: Dict[str, DataTuple]", "res = requests.get(snp_url) if not res.ok: raise Exception(f\"Received code: {res.status_code} from {snp_url}\") bs", "if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if __name__ == \"__main__\": snp", "List[str] = [] for header in html_headers: h_str = header.string if not h_str:", "= col.find(\"a\") data_str = link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str =", "snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if __name__ == \"__main__\": snp = get_snp_details(\"rs28937869\")", "if not data_str: link = col.find(\"a\") data_str = link.string data_str = data_str.strip() if", "= \"\".join(c for c in data_str if c not in [\"(\", \";\", \")\"])", "data_str): data_str = \"\".join(c for c in data_str if c not in [\"(\",", "if description_table: description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs)", "row.find_all(\"td\") if not cols: continue row_data = [] for col in cols: data_str", "[] for header in html_headers: h_str = header.string if not h_str: link =", "col.string if not data_str: link = 
col.find(\"a\") data_str = link.string data_str = data_str.strip()", "SNP: snp_kwargs: Dict[str, Any] = {} snp_url = f\"https://bots.snpedia.com/index.php/{rsid}\" res = requests.get(snp_url) if", "def __init__( self, rsid: str, table: Optional[list] = None, description: Optional[str] = None", "= None, description: Optional[str] = None ): self.rsid = rsid self.table = table", "link.string data_str = data_str.strip() if re.match(GENO_REGEX, data_str): data_str = \"\".join(c for c in", "smwtable\"}) if table: snp_kwargs[\"table\"] = table_to_dict(table) description_table = bs.find(\"table\", {\"style\": DESC_STYLE}) if description_table:", "Optional[list] = None, description: Optional[str] = None ): self.rsid = rsid self.table =", "description_table: description_html = description_table.find(\"td\") if description_html: snp_kwargs[\"description\"] = description_html.string return SNP(rsid, **snp_kwargs) if", "self.rsid = rsid self.table = table self.description = description def table_to_dict(table: bs4.element.Tag) ->", "import bs4 GENO_REGEX = re.compile(r\"\\(.;.\\)\") DESC_STYLE = ( \"border: 1px; background-color: #FFFFC0;\" +", "link.string headers.append(h_str.strip().lower()) DataTuple = NamedTuple(\"Row\", ((header, str) for header in headers)) # type:", "DataTuple = NamedTuple(\"Row\", ((header, str) for header in headers)) # type: ignore html_rows" ]
[ "n = len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] =", "- len(index))] index.extend([last[i] for i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for", "if needed. cost_fun() is defined for multi-objective multi-constraint optimization. Its up to the", "= site & (mu <= 0.5) lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper,", "in range(len(fronts)): front = np.array([k for k in range(len(rank)) if rank[k] == fronts[f]])", "sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"]", "= fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop, self.pop_obj,", "K: number of solutions to be compared :param N: number of solutions to", "best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr, prev_data", "= beta * ((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n // 2, d))", "than zero.') # Note initial state if initial_state is None : best_state, best_obj,", "mate[1::2] = half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise", "arrangement of the system Returns (state, energy, objectives, constraints, data): the best state", "mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return", "crowding distance \"\"\" n, M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank)", ":], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in zip(select, data):", "j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return return assign_rank(dominate, dominated_by_counter) def 
assign_rank(dominate,", "of dominated population elements [[]*N] :param dominated_by_counter: counter of elements dominating (Nx1) :returns:", "= [] for index_a in current_front: # reduce the numbers of domination to", "self.pop_data, evals, front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if", "(Nx1) :returns: ranks: an array with the ranks max_rank: max rank \"\"\" N", ":returns: dominator: the index of the dominator, None if i and j are", "\"\"\" N = len(dominate) ranks = np.inf * np.ones(N) current_rank = 1 #", "Parameters state : an initial arrangement of the system Returns (state, energy, objectives,", "# Restore previous state state, obj, cstr, data, value = prev_state, prev_obj, prev_cstr,", "- lower[temp]) * \\ (np.power(2. * mu[temp] + (1. - 2. * mu[temp])", "its set of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become", "if( they become non dominated - then they are part of next front)", "function if needed. cost_fun() is defined for multi-objective multi-constraint optimization. Its up to", "- 1.) temp = site & (mu > 0.5) norm = (upper[temp] -", "< steps: step += 1 T = t_max * math.exp(cooling_factor * step /", "-1000 no sim, 1000 meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values =", "in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non dominated - then they are", "pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i] - fmin[i]) if fmax[i] != fmin[i]", "mu[temp]) * np.power(1. - norm, dis_m + 1.), 1. / (dis_m + 1))", "from the in the last from index = [i for i in range(len(fronts))", "2, d))) beta[np.random.random((n // 2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n // 2,", "front_no) evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals", "\\ (np.power(2. * mu[temp] + (1. - 2. * mu[temp]) * np.power(1. 
-", "pop_cstr: population constraint violation (Nx1) :returns: ranks: an array with the ranks max_rank:", "self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals > 0: mating_pool = tournament(front_no,", "numbers :return: crowding distance \"\"\" n, M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts", "index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2,", "vectors :return: the objective, constraints, and additional data vectors \"\"\" n = x.shape[0]", "parameter_values def half_tournemant(rank, cdist): n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank =", "j_dominates_i: return j, i return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and", "prev_cstr, prev_data, prev_value else: # Accept new state and compare to best state", "... ] parameters = [\"name\", ... ] min = [min, ... ] max", "in range(N): for j in range(i+1,N): #constrained pareto dominance if pop_cstr[i] == pop_cstr[j]:", "of domination to the ones in its set of dominance for index_b in", "= v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs)", "fronts = fronts[fronts != np.inf] for f in range(len(fronts)): front = np.array([k for", "1.), 1. 
/ (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec", "if initial_state is None : best_state, best_obj, best_cstr, best_data = problem.initialize(1) else: best_state,", "= t_max * math.exp(cooling_factor * step / steps) state, obj, cstr, data =", "parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n = len(rank) index_a", "distance # arguably they could be refractored out of this function fronts, max_front", "< pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j", ":param n: number of selected individuals :return: next generation population ( decison vars,", "additional data vectors \"\"\" n = x.shape[0] obj = np.zeros((n, 1)) cstr =", "~decision_a]] def tournament(rank, cdist): ''' tournament selection :param K: number of solutions to", "for i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index],", "pareto dominance dominator, dominated = objective_dominance(pop_obj, i, j) if dominator is not None:", "rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for i", "sets the ranks of the population elements using the fast non-dominated sorting method.", "[i for i in range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n -", "> 0.5] = np.power(2 * mu[mu > 0.5], -1 / (dis_c + 1))", "parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank,", "- norm, dis_m + 1.), 1. 
/ (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec,", "device sizes self.values update is self.value += self.value + action*(self.ranges[:,1] - self.ranges[:, 0])", "= False for obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j =", "of pop_dec once d != self.d :param pop_dec: decision vectors :return: \"\"\" dis_c", ":param pop_dec: decision vectors :return: \"\"\" dis_c = 10 dis_m = 20 pop_dec", "decison vars, objectives, constraints, data, rank, and cdist) ''' # fast non-dominated sorting", "# if( they become non dominated - then they are part of next", "cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV = 100*(value - prev_value)", "needed. cost_fun() is defined for multi-objective multi-constraint optimization. Its up to the single", "-1 / (dis_c + 1)) beta = beta * ((-1)** np.random.randint(2, size=(n //", "i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1, len(front) -", "index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non dominated - then they", "0.5], 1 / (dis_c + 1)) beta[mu > 0.5] = np.power(2 * mu[mu", "a minimum temperature greater than zero.') # Note initial state if initial_state is", "= 1 beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1, d))] = 1 offspring_dec", "else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr,", "and (not j_dominates_i): return i, j if (not i_dominates_j) and j_dominates_i: return j,", "best_data, best_value, = state, obj, cstr, data, value # Return best state and", "set of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non", "best_cstr, best_data = problem.initialize(1) else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state =", "1. / (dis_m + 1)) - 1.) 
temp = site & (mu >", "+= (upper[temp] - lower[temp]) * \\ (np.power(2. * mu[temp] + (1. - 2.", "dominated, None if i and j are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j", "if rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for", "best_obj, best_cstr, best_data = problem.initialize(1) else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state", "(pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize the population :param N:", "0]) - Outouts: observation, reward, done, {} - observations array of concat [", "< 0.5] = 1 beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1, d))] =", "- parent_2_dec) / 2)) site = np.random.random((n, d)) < mutation mu = np.random.random((n,", "to debug the cost function. # In analog IC optimization we will use", "* mu[mu > 0.5], -1 / (dis_c + 1)) beta = beta *", "yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9),", "\"\"\" def __init__(self, d, min, max): self.d = d self.upper = max self.lower", "itertools import repeat from collections import Sequence class Problem(object): \"\"\" The problem related", "index_a = np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] >", "larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament selection", "eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank,", "(mu <= 0.5) lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm", "crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr =", "for 
holding the domination info required for fast nd sorting dominate = [[]", "of solutions to be selected :param fit: fitness vectors :return: index of selected", "1]], i)] ) / ((fmax[i] - fmin[i]) if fmax[i] != fmin[i] else 1.0)", "range(len(rank)) if rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0)", "0.0: raise ValueError('Exponential cooling requires a minimum temperature greater than zero.') # Note", "1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self, parameter_values): \"\"\" Inputs:", "j): \"\"\" Computes objective-wise dominance between elements i and j of the population.", "initialize(self, N): \"\"\" initialize the population :param N: number of elements in the", "self.upper = max self.lower = min def __str__(self): return \"Target: {}\".format(self.target) def cost_fun(self,", "= 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] =", "in the paper. Created on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA - Adapted", "return best_state, best_value if __name__ == '__main__': seed = 17 np.random.seed(seed) sat_conditions =", "crossover, (1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2 +", "population. extra elements are only from the in the last from index =", "front numbers :return: crowding distance \"\"\" n, M = np.shape(pop_obj) crowd_dis = np.zeros(n)", "can be usefull to debug the cost function. 
# In analog IC optimization", "= 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] =", "n: number of selected individuals :return: next generation population ( decison vars, objectives,", "if dV > 0.0 and math.exp(-dV / T) < np.random.random(): # Restore previous", "extra elements are only from the in the last from index = [i", "cstr, data, value # Return best state and energy return best_state, best_value if", "counter of elements dominating (Nx1) :returns: ranks: an array with the ranks max_rank:", "\"\"\" turn decision vectors into individuals :param pop_vars: decision vectors :return: (pop_vars, pop_obj,", "1000 meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1]", "individuals :return: next generation population ( decison vars, objectives, constraints, data, rank, and", "0.5) norm = (upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp]", "#constrained pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator, dominated =", "= 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"]", "= problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in zip(select, data): self.pop_data[i] = v", "50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd':", "fronts) #Select elements from all fronts except the last. 
Note that fnd_sort only", "cdist) return mate def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance between elements", "0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05", "pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data) def", "= 10000, t_max = 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): '''", "ValueError('Exponential cooling requires a minimum temperature greater than zero.') # Note initial state", "(n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :] parent_2_dec = pop_dec[n", "elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j and (not j_dominates_i):", "= problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj,", "boundary of pop_dec once d != self.d :param pop_dec: decision vectors :return: \"\"\"", "sorting and crowding distance # arguably they could be refractored out of this", "& (mu <= 0.5) lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1))", ":return: \"\"\" dis_c = 10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) // 2)", "0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :],", "< self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data", "= half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i, j):", "np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts != np.inf] for f in range(len(fronts)):", "pop_size yield self.pop, self.pop_obj, 
self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals, index =", "0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a)", "dominated_by_counter[i] == 0] while np.sum(ranks < np.inf) < N/2: ranks[current_front] = current_rank next_front", "best_state, best_obj, best_cstr, best_data, best_value, = state, obj, cstr, data, value # Return", "and j are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j = False j_dominates_i =", "self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop)", "((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n // 2, d)) < 0.5] =", "values. Extending classes should round integers in the cost function if needed. cost_fun()", ": 45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' :", "used in the optimization # it can be usefull to debug the cost", "j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign", "fmax[i] != fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n):", "[i for i in range(N) if dominated_by_counter[i] == 0] while np.sum(ranks < np.inf)", "= np.random.random((n, d)) temp = site & (mu <= 0.5) lower, upper =", "crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)]", "and energy return best_state, best_value if __name__ == '__main__': seed = 17 np.random.seed(seed)", "index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index) < self.pop.shape[0] : select =", "> 0.5], -1 / (dis_c + 1)) beta = beta * ((-1)** np.random.randint(2,", "dV > 0.0 
and math.exp(-dV / T) < np.random.random(): # Restore previous state", "individuals :param pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data", "return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of the population", "fast nd sorting dominate = [[] for x in range(N)] dominated_by_counter = np.zeros(N,", "requires a minimum temperature greater than zero.') # Note initial state if initial_state", "dtype=int) for i in range(N): for j in range(i+1,N): #constrained pareto dominance if", "fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from all", "in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for", "individual(self, pop_vars): \"\"\" turn decision vectors into individuals :param pop_vars: decision vectors :return:", "!= np.inf] for f in range(len(fronts)): front = np.array([k for k in range(len(rank))", "pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n //", "\"\"\" NSGA-II algorithm \"\"\" if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data =", "cost_fun(self, x): \"\"\" calculate the objective and constraints vectors :param x: the decision", "0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1", "j are non-dominated dominated: the index of the dominated, None if i and", "and compare to best state prev_state, prev_obj, prev_cstr, prev_data, prev_value = state, obj,", "Generate offspring individuals :param boundary: lower and upper boundary of pop_dec once d", "d))] = 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta *", "variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring 
individuals :param boundary: lower and upper", "convert_multi_obj = default_mo_2_so): ''' Minimizes the energy of a system by simulated annealing.", "paper. Created on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II", "= 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] =", "upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] - lower[temp])", "None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the ranks of the population", "domination info required for fast nd sorting dominate = [[] for x in", "self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data,", "store the simulation outputs data = np.zeros((n, 1)) return obj, cstr, data def", "the last. Note that fnd_sort only #sorts half the population. extra elements are", "rank \"\"\" N,M = pop_obj.shape # structures for holding the domination info required", "cdist): n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]]", "no sim, 1000 meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values", "1)) beta[mu > 0.5] = np.power(2 * mu[mu > 0.5], -1 / (dis_c", "rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament selection :param", "cstr, data def individual(self, pop_vars): \"\"\" turn decision vectors into individuals :param pop_vars:", "len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j", "value # Return best state and energy return best_state, best_value if __name__ ==", "class Problem(object): \"\"\" The problem related parameters and variation operators of 
cross over", "obj, cstr, data, value = prev_state, prev_obj, prev_cstr, prev_data, prev_value else: # Accept", "the numbers of domination to the ones in its set of dominance for", "variation operators of cross over and mutation for GA, and move for SA.", "initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr,", "= evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove duplicates", "* np.power(1. - norm, dis_m + 1.), 1. / (dis_m + 1.))) offspring_dec", "vectors :param rank: front numbers :return: crowding distance \"\"\" n, M = np.shape(pop_obj)", ":return: crowding distance \"\"\" n, M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts =", "np.zeros((n, 1)) cstr = np.zeros(n) # data associated with the solutions but not", "in its set of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they", "#sorts half the population. 
extra elements are only from the in the last", "only from the in the last from index = [i for i in", "sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"]", "= 100*(value - prev_value) if dV > 0.0 and math.exp(-dV / T) <", "with the ranks max_rank: max rank \"\"\" N = len(dominate) ranks = np.inf", "= 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the energy", "# j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1", "d)) temp = site & (mu <= 0.5) lower, upper = np.tile(self.lower, (n,", "- len(index))) for i, v in zip(select, data): self.pop_data[i] = v return self.pop,", "dominated: the index of the dominated, None if i and j are non-dominated", "population ( decison vars, objectives, constraints, data, rank, and cdist) ''' # fast", "n): ''' Environmental selection in NSGA-II :param population: current population :param n: number", "self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover )) self.pop", "and move for SA. Parameters are handled in variation operators as real values.", "* mu[temp]) * np.power(1. - norm, dis_m + 1.), 1. / (dis_m +", "problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec))", "in variation operators as real values. Extending classes should round integers in the", "except the last. Note that fnd_sort only #sorts half the population. 
extra elements", "i in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf", "i in range(N) if dominated_by_counter[i] == 0] while np.sum(ranks < np.inf) < N/2:", "state state, obj, cstr, data, value = prev_state, prev_obj, prev_cstr, prev_data, prev_value else:", "an initial arrangement of the system Returns (state, energy, objectives, constraints, data): the", "// 2, d)) mu = np.random.random((n // 2, d)) beta[mu <= 0.5] =", "objective_dominance(pop_obj, i, j) if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] <", "return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament selection :param K: number of", "state, obj, cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV = 100*(value", "0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom':", "generation population ( decison vars, objectives, constraints, data, rank, and cdist) ''' #", "sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"]", "+ parent_2_dec) / 2 - beta * (parent_1_dec - parent_2_dec) / 2)) site", "prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr)", "beta[np.random.random((n // 2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n // 2, 1)) >", "= half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance", "selection :param K: number of solutions to be compared :param N: number of", "they could be refractored out of this function fronts, max_front = fnd_sort(pop_obj, pop_cstr)", "False) sa = SA() print(circuit) for iter, stats in 
sa.minimize(circuit): print(\"\\r iter {}:", "0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1", "of selected individuals :return: next generation population ( decison vars, objectives, constraints, data,", "evals, front_no while evals > 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr,", "i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks", "crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover=", "sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"]", "/ ((fmax[i] - fmin[i]) if fmax[i] != fmin[i] else 1.0) return crowd_dis def", "crowding_distance(pop_obj, fronts) #Select elements from all fronts except the last. 
Note that fnd_sort", "range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx]", "< pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i =", "in the population :return: the initial population \"\"\" pop_dec = (np.random.random((N, self.d)) *", "Tmax to Tmin cooling_factor = -math.log(t_max / t_min) # Attempt moves to new", "* (self.upper - self.lower)) + self.lower return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6):", "pop_obj, pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize the population :param N: number", "best_value, = state, obj, cstr, data, value # Return best state and energy", "/ (dis_c + 1)) beta = beta * ((-1)** np.random.randint(2, size=(n // 2,", "''' if t_min <= 0.0: raise ValueError('Exponential cooling requires a minimum temperature greater", "] parameters = [\"name\", ... ] min = [min, ... ] max =", "for i in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] =", "state prev_state, prev_obj, prev_cstr, prev_data, prev_value = state, obj, cstr, data, value if", "as np import math from itertools import repeat from collections import Sequence class", "the in the last from index = [i for i in range(len(fronts)) if", "from all fronts except the last. 
Note that fnd_sort only #sorts half the", "/ self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n =", "0 # Precompute factor for exponential cooling from Tmax to Tmin cooling_factor =", "larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return", "= np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values =", "constraint violation (Nx1) :returns: ranks: an array with the ranks max_rank: max rank", "* \\ (np.power(2. * mu[temp] + (1. - 2. * mu[temp]) * np.power(1.", "best_value: best_state, best_obj, best_cstr, best_data, best_value, = state, obj, cstr, data, value #", "\"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self,", "= 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] =", "500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop is None: self.pop,", "dominate = [[] for x in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i", "last = [i for i in range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[:", "= len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank,", "Minimizes the energy of a system by simulated annealing. 
Parameters state : an", "i in range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i]", "= [[] for x in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i in", "= np.inf * np.ones(N) current_rank = 1 # if non dominated is part", "initial arrangement of the system Returns (state, energy, objectives, constraints, data): the best", "t_min <= 0.0: raise ValueError('Exponential cooling requires a minimum temperature greater than zero.')", "solutions ''' n = len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist)", "of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non dominated", "i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return return assign_rank(dominate, dominated_by_counter)", "def initialize(self, N): \"\"\" initialize the population :param N: number of elements in", "size=(n // 2, d))) beta[np.random.random((n // 2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n", "Nov, 2020 @author: <NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to handle", "evals = evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove", "all fronts except the last. Note that fnd_sort only #sorts half the population.", "offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta * (parent_1_dec - parent_2_dec)", "''' import numpy as np import math from itertools import repeat from collections", "0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05", "= [min, ... ] max = [max, ... 
] \"\"\" def __init__(self, d,", "= 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] =", ": select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0]", "import math from itertools import repeat from collections import Sequence class Problem(object): \"\"\"", "in range(len(rank)) if rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0) fmin = pop_obj[front,", "self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data", "compared :param N: number of solutions to be selected :param fit: fitness vectors", "from collections import Sequence class Problem(object): \"\"\" The problem related parameters and variation", "= 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] =", "0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1", "self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data =", "> 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool,", "self.lower return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals :param", "in zip(select, data): self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs,", "+ 2. * (mu[temp] - 0.5) * np.power(1. 
- norm, dis_m + 1.),", "2, 1)) > crossover, (1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec)", "and constraints vectors :param x: the decision vectors :return: the objective, constraints, and", "self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover", "= pop_dec[n // 2:, :] beta = np.zeros((n // 2, d)) mu =", "code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import math from itertools import", "crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts != np.inf] for f", "\"front_id\" :param pop_obj: objective vectors :param rank: front numbers :return: crowding distance \"\"\"", "d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :] parent_2_dec = pop_dec[n //", "pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i] -", "np.power(1. - norm, dis_m + 1.), 1. / (dis_m + 1.))) offspring_dec =", "= np.zeros(N, dtype=int) for i in range(N): for j in range(i+1,N): #constrained pareto", "fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr,", "the domination info required for fast nd sorting dominate = [[] for x", "fitness vectors :return: index of selected solutions ''' n = len(rank) mate =", "new state and compare to best state prev_state, prev_obj, prev_cstr, prev_data, prev_value =", "- reward +1 improved, -1 worsen, -1000 no sim, 1000 meet specs \"\"\"", "pop_vars): \"\"\" turn decision vectors into individuals :param pop_vars: decision vectors :return: (pop_vars,", "last from index = [i for i in range(len(fronts)) if fronts[i] < max_front]", "energy of a system by simulated annealing. Parameters state : an initial arrangement", "using the fast non-dominated sorting method. 
:param dominate: list of dominated population elements", "0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1", "''' Minimizes the energy of a system by simulated annealing. Parameters state :", "np.zeros(N, dtype=int) for i in range(N): for j in range(i+1,N): #constrained pareto dominance", "fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental", "- beta * (parent_1_dec - parent_2_dec) / 2)) site = np.random.random((n, d)) <", "population elements using the fast non-dominated sorting method. :param dominate: list of dominated", "= 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] =", "rank): \"\"\" The crowding distance of the Pareto front \"front_id\" :param pop_obj: objective", "= d self.upper = max self.lower = min def __str__(self): return \"Target: {}\".format(self.target)", "return i, j if (not i_dominates_j) and j_dominates_i: return j, i return None,", "< rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament selection :param K:", "/ T) < np.random.random(): # Restore previous state state, obj, cstr, data, value", "upper), lower) return offspring_dec def move(self, parameter_values): \"\"\" Inputs: - value - new", "for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non dominated - then", "lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (1. - np.power( 2. 
*", "= state, obj, cstr, data, value if value < best_value: best_state, best_obj, best_cstr,", "objectives :param i, j: the elems being compared :returns: dominator: the index of", "self.values update is self.value += self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts:", "Parameters are handled in variation operators as real values. Extending classes should round", "array with the ranks max_rank: max rank \"\"\" N,M = pop_obj.shape # structures", "# Accept new state and compare to best state prev_state, prev_obj, prev_cstr, prev_data,", "- observations array of concat [ values, measures] - reward +1 improved, -1", "for index_a in current_front: # reduce the numbers of domination to the ones", "they become non dominated - then they are part of next front) if", "0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1", "_,M = pop_obj.shape i_dominates_j = False j_dominates_i = False for obj_idx in range(M):", "are only from the in the last from index = [i for i", "are non-dominated dominated: the index of the dominated, None if i and j", "True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j and (not", "''' n = len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2]", "non-dominated sorting method. :param pop_obj: population objectives (NxM) :param pop_cstr: population constraint violation", "objective-wise dominance between elements i and j of the population. :param pop_obj: the", "obj, cstr, data, value # Return best state and energy return best_state, best_value", "index_a in current_front: # reduce the numbers of domination to the ones in", "ranks = np.inf * np.ones(N) current_rank = 1 # if non dominated is", "number of selected individuals :return: next generation population ( decison vars, objectives, constraints,", "norm, dis_m + 1.), 1. 
/ (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper),", "crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\", "// 2, d)) beta[mu <= 0.5] = np.power(2 * mu[mu <= 0.5], 1", "are part of next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front", "sa = SA() print(circuit) for iter, stats in sa.minimize(circuit): print(\"\\r iter {}: {}\".format(iter,", "// 2, :] parent_2_dec = pop_dec[n // 2:, :] beta = np.zeros((n //", "dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non dominated - then they are part", "N/2: ranks[current_front] = current_rank next_front = [] for index_a in current_front: # reduce", "len(dominate) ranks = np.inf * np.ones(N) current_rank = 1 # if non dominated", "data = np.zeros((n, 1)) return obj, cstr, data def individual(self, pop_vars): \"\"\" turn", "[ values, measures] - reward +1 improved, -1 worsen, -1000 no sim, 1000", "offspring_dec def move(self, parameter_values): \"\"\" Inputs: - value - new device sizes self.values", "the index of the dominated, None if i and j are non-dominated \"\"\"", "cstr, data, value if value < best_value: best_state, best_obj, best_cstr, best_data, best_value, =", "+ 1)) beta[mu > 0.5] = np.power(2 * mu[mu > 0.5], -1 /", "* 2][:] (n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :] parent_2_dec", "(1. - mu[temp]) + 2. * (mu[temp] - 0.5) * np.power(1. - norm,", "self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis =", "def individual(self, pop_vars): \"\"\" turn decision vectors into individuals :param pop_vars: decision vectors", "= np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts != np.inf]", "single objective optimizers to implement objective weigthing. objectives = [\"name\", ... 
] parameters", "= problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr,", "np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta * (parent_1_dec - parent_2_dec) / 2,", "self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data,", "2, d)) beta[mu <= 0.5] = np.power(2 * mu[mu <= 0.5], 1 /", "dominated is part of front 1*/ current_front = [i for i in range(N)", "data associated with the solutions but not used in the optimization # it", "[max, ... ] \"\"\" def __init__(self, d, min, max): self.d = d self.upper", "{} - observations array of concat [ values, measures] - reward +1 improved,", "... ] \"\"\" def __init__(self, d, min, max): self.d = d self.upper =", "the objective and constraints vectors :param x: the decision vectors :return: the objective,", "- Outouts: observation, reward, done, {} - observations array of concat [ values,", "of the population elements using the fast non-dominated sorting method. 
:param dominate: list", "self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data", "pop_dec[:n // 2, :] parent_2_dec = pop_dec[n // 2:, :] beta = np.zeros((n", "Problem(object): \"\"\" The problem related parameters and variation operators of cross over and", "self.lower = min def __str__(self): return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate", "step < steps: step += 1 T = t_max * math.exp(cooling_factor * step", "for j in range(i+1,N): #constrained pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto", "import repeat from collections import Sequence class Problem(object): \"\"\" The problem related parameters", "Tmin cooling_factor = -math.log(t_max / t_min) # Attempt moves to new states while", "= -math.log(t_max / t_min) # Attempt moves to new states while step <", "None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1", "pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self, N):", "= 10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:] (n,", "- self.ranges[:, 0]) - Outouts: observation, reward, done, {} - observations array of", "crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from all fronts except the last. Note", "for fast nd sorting dominate = [[] for x in range(N)] dominated_by_counter =", "self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return", "N,M = pop_obj.shape # structures for holding the domination info required for fast", "self.d = d self.upper = max self.lower = min def __str__(self): return \"Target:", "debug the cost function. 
# In analog IC optimization we will use this", "\"\"\" n = x.shape[0] obj = np.zeros((n, 1)) cstr = np.zeros(n) # data", "- offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\", "value = convert_multi_obj(obj, cstr) dV = 100*(value - prev_value) if dV > 0.0", "[i for i in range(len(fronts)) if fronts[i] < max_front] last = [i for", "fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i in delta_n])", "2 - beta * (parent_1_dec - parent_2_dec) / 2)) site = np.random.random((n, d))", "fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8,", "= 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions)", "(1. - 2. * mu[temp]) * np.power(1. - norm, dis_m + 1.), 1.", "required for fast nd sorting dominate = [[] for x in range(N)] dominated_by_counter", "= pop_obj[front, :].min(0) for i in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] =", "energy found. 
''' if t_min <= 0.0: raise ValueError('Exponential cooling requires a minimum", "2 + beta * (parent_1_dec - parent_2_dec) / 2, (parent_1_dec + parent_2_dec) /", "circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa", "optimization and fast non-dominated sorting SA - Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py", "= best_state, best_obj, best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step =", "\"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0])", "self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0,", "\"\"\" n, M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts =", "np.power(1. - norm, dis_m + 1.), 1. / (dis_m + 1)) - 1.)", "best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute factor", "of the population elements using the fast non-dominated sorting method. 
:param pop_obj: population", "= 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] =", "Pareto front \"front_id\" :param pop_obj: objective vectors :param rank: front numbers :return: crowding", "np.random.random((n // 2, d)) beta[mu <= 0.5] = np.power(2 * mu[mu <= 0.5],", "out of this function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts)", "if __name__ == '__main__': seed = 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] =", "sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"]", "= fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from all fronts except", "best_cstr) step = 0 # Precompute factor for exponential cooling from Tmax to", "(dis_c + 1)) beta[mu > 0.5] = np.power(2 * mu[mu > 0.5], -1", "this # data to store the simulation outputs data = np.zeros((n, 1)) return", "outputs data = np.zeros((n, 1)) return obj, cstr, data def individual(self, pop_vars): \"\"\"", "return (pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize the population :param", "N = len(dominate) ranks = np.inf * np.ones(N) current_rank = 1 # if", "data): the best state and energy found. 
''' if t_min <= 0.0: raise", "dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator, dominated = objective_dominance(pop_obj, i,", "of this function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select", "next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The crowding distance of", "of the system Returns (state, energy, objectives, constraints, data): the best state and", "= [i for i in range(N) if dominated_by_counter[i] == 0] while np.sum(ranks <", "= 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw':", "- self.lower)) + self.lower return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate", "the Pareto front \"front_id\" :param pop_obj: objective vectors :param rank: front numbers :return:", ":param dominated_by_counter: counter of elements dominating (Nx1) :returns: ranks: an array with the", "data def individual(self, pop_vars): \"\"\" turn decision vectors into individuals :param pop_vars: decision", ":return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj,", "dominated population elements [[]*N] :param dominated_by_counter: counter of elements dominating (Nx1) :returns: ranks:", "> cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def", "factor for exponential cooling from Tmax to Tmin cooling_factor = -math.log(t_max / t_min)", "nd sorting dominate = [[] for x in range(N)] dominated_by_counter = np.zeros(N, dtype=int)", "max rank \"\"\" N = len(dominate) ranks = np.inf * np.ones(N) current_rank =", "from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import 
math from", "* step / steps) state, obj, cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj,", "= (upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp])", "state, obj, cstr, data, value = prev_state, prev_obj, prev_cstr, prev_data, prev_value else: #", "is None : best_state, best_obj, best_cstr, best_data = problem.initialize(1) else: best_state, best_obj, best_cstr,", "pop_dec[n // 2:, :] beta = np.zeros((n // 2, d)) mu = np.random.random((n", "= cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a,", "action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1])", "sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"]", "return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index], fronts[index], crowd_dis[index],index class NSGA2:", "self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data,", "ranks of the population elements using the fast non-dominated sorting method. 
:param dominate:", "i in index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 *", ":param pop_obj: the value of the populations' objectives :param i, j: the elems", "j_dominates_i): return i, j if (not i_dominates_j) and j_dominates_i: return j, i return", "(np.random.random((N, self.d)) * (self.upper - self.lower)) + self.lower return pop_dec def variation(self, pop_dec,", "for iter, stats in sa.minimize(circuit): print(\"\\r iter {}: {}\".format(iter, stats)) print(sa.best_state) print(circuit.simulate(sa.best_state)) print(circuit.target.verify(circuit.simulate(sa.best_state)))", "Return best state and energy return best_state, best_value if __name__ == '__main__': seed", "best_obj, best_cstr, best_data, best_value, = state, obj, cstr, data, value # Return best", "pop_data, n): ''' Environmental selection in NSGA-II :param population: current population :param n:", "and variation operators of cross over and mutation for GA, and move for", "self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals", "35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa = SA() print(circuit) for iter, stats", "= [\"name\", ... ] min = [min, ... ] max = [max, ...", "<= 0.5) lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm =", "non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j = False j_dominates_i = False for obj_idx", "then they are part of next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front", "non-dominated sorting method. 
:param dominate: list of dominated population elements [[]*N] :param dominated_by_counter:", "elements are only from the in the last from index = [i for", "on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to", "the value of the populations' objectives :param i, j: the elems being compared", "def simulated_annealing(problem, steps = 10000, t_max = 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj", "value of the populations' objectives :param i, j: the elems being compared :returns:", "optimizers to implement objective weigthing. objectives = [\"name\", ... ] parameters = [\"name\",", "0.5], -1 / (dis_c + 1)) beta = beta * ((-1)** np.random.randint(2, size=(n", "to store the simulation outputs data = np.zeros((n, 1)) return obj, cstr, data", "< N/2: ranks[current_front] = current_rank next_front = [] for index_a in current_front: #", "= np.power(2 * mu[mu > 0.5], -1 / (dis_c + 1)) beta =", "= rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist),", "= environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size yield self.pop, self.pop_obj,", "= 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] =", "and fast non-dominated sorting SA - Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py '''", "system Returns (state, energy, objectives, constraints, data): the best state and energy found.", "the population. 
extra elements are only from the in the last from index", "-math.log(t_max / t_min) # Attempt moves to new states while step < steps:", "constraints, and additional data vectors \"\"\" n = x.shape[0] obj = np.zeros((n, 1))", "np.zeros(n) # data associated with the solutions but not used in the optimization", "+1 improved, -1 worsen, -1000 no sim, 1000 meet specs \"\"\" action =", "None : best_state, best_obj, best_cstr, best_data = problem.initialize(1) else: best_state, best_obj, best_cstr, best_data", "/ (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (np.power(2. *", "+= (upper[temp] - lower[temp]) * \\ (1. - np.power( 2. * (1. -", "it can be usefull to debug the cost function. # In analog IC", "if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front current_rank+=1 return ranks, current_rank-1 def", "dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj,", "= (np.random.random((N, self.d)) * (self.upper - self.lower)) + self.lower return pop_dec def variation(self,", "dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j dominates", "/ (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (1. 
-", "\"\"\" sets the ranks of the population elements using the fast non-dominated sorting", "The problem related parameters and variation operators of cross over and mutation for", ":param fit: fitness vectors :return: index of selected solutions ''' n = len(rank)", "observation, reward, done, {} - observations array of concat [ values, measures] -", "= prev_value = convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute factor for exponential", "offspring individuals :param boundary: lower and upper boundary of pop_dec once d !=", "= 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the energy of a system", "] max = [max, ... ] \"\"\" def __init__(self, d, min, max): self.d", "current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The crowding distance of the Pareto front \"front_id\"", "for i, v in zip(select, data): self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr,", "selection in NSGA-II :param population: current population :param n: number of selected individuals", "dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else:", "return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental selection in NSGA-II", "problem.initialize(1) else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state = best_state prev_state, prev_obj,", "= np.zeros(n) # data associated with the solutions but not used in the", "= parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values", "/ t_min) # Attempt moves to new states while step < steps: step", "np import math from itertools import repeat from collections import Sequence class Problem(object):", "= 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 
sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] =", "obj, cstr, data def individual(self, pop_vars): \"\"\" turn decision vectors into individuals :param", "for multi-objective multi-constraint optimization. Its up to the single objective optimizers to implement", "def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance between elements i and j", "parameters = [\"name\", ... ] min = [min, ... ] max = [max,", "prev_data = best_state, best_obj, best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step", "> crossover, (1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2", "return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the ranks of", "cost function. # In analog IC optimization we will use this # data", "= pop_obj.shape # structures for holding the domination info required for fast nd", "= 1 # if non dominated is part of front 1*/ current_front =", "problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr)", "in range(len(fronts)) if fronts[i] < max_front] last = [i for i in range(len(fronts))", "if t_min <= 0.0: raise ValueError('Exponential cooling requires a minimum temperature greater than", "np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a =", "pop_obj: population objectives (NxM) :param pop_cstr: population constraint violation (Nx1) :returns: ranks: an", "data, rank, and cdist) ''' # fast non-dominated sorting and crowding distance #", "front 1*/ current_front = [i for i in range(N) if dominated_by_counter[i] == 0]", "is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j dominates i", "sat_conditions[\"vov_mnm8\"] = 0.05 
sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"]", "= np.inf for j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ (", "mutation, crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr", "step = 0 # Precompute factor for exponential cooling from Tmax to Tmin", "(upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) *", "select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] -", "objective weigthing. objectives = [\"name\", ... ] parameters = [\"name\", ... ] min", "1 / (dis_c + 1)) beta[mu > 0.5] = np.power(2 * mu[mu >", "0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1", "len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist)", "crowding distance # arguably they could be refractored out of this function fronts,", "- self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return", "2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1,", "the population :param N: number of elements in the population :return: the initial", "= 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] =", "structures for holding the domination info required for fast nd sorting dominate =", "cstr) : return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps = 
10000, t_max =", "self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals -", "NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II", "to new states while step < steps: step += 1 T = t_max", "half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance between", "max_rank: max rank \"\"\" N = len(dominate) ranks = np.inf * np.ones(N) current_rank", "related parameters and variation operators of cross over and mutation for GA, and", "number of solutions to be selected :param fit: fitness vectors :return: index of", "current population :param n: number of selected individuals :return: next generation population (", "fmax = pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for i in range(M): sorted_index", "def __init__(self, d, min, max): self.d = d self.upper = max self.lower =", "* (parent_1_dec - parent_2_dec) / 2)) site = np.random.random((n, d)) < mutation mu", "900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions =", "the single objective optimizers to implement objective weigthing. objectives = [\"name\", ... ]", "the elems being compared :returns: dominator: the index of the dominator, None if", "dominate: list of dominated population elements [[]*N] :param dominated_by_counter: counter of elements dominating", "1)) beta = beta * ((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n //", "usefull to debug the cost function. # In analog IC optimization we will", "method. 
:param dominate: list of dominated population elements [[]*N] :param dominated_by_counter: counter of", "axis=0, return_index =True) if len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select,", "best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute factor for", "data vectors \"\"\" n = x.shape[0] obj = np.zeros((n, 1)) cstr = np.zeros(n)", "previous state state, obj, cstr, data, value = prev_state, prev_obj, prev_cstr, prev_data, prev_value", "def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the ranks of the population elements", "mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i,", "Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import math", "real values. Extending classes should round integers in the cost function if needed.", "for k in range(len(rank)) if rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0) fmin", "range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i in range(N): for j in range(i+1,N):", "class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\"", "None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data =", "data): self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) :", "sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900}", ") / ((fmax[i] - fmin[i]) if fmax[i] != fmin[i] else 1.0) return crowd_dis", "= np.unique(rank) fronts = fronts[fronts != np.inf] for f in range(len(fronts)): front =", "''' tournament selection :param K: number of solutions to be compared :param N:", 
"pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize the population :param N: number of", "dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d) =", "reduce the numbers of domination to the ones in its set of dominance", "= 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] =", "data, value = prev_state, prev_obj, prev_cstr, prev_data, prev_value else: # Accept new state", "of a system by simulated annealing. Parameters state : an initial arrangement of", "if dominated_by_counter[i] == 0] while np.sum(ranks < np.inf) < N/2: ranks[current_front] = current_rank", "are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j = False j_dominates_i = False for", "for x in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i in range(N): for", "(not j_dominates_i): return i, j if (not i_dominates_j) and j_dominates_i: return j, i", "state : an initial arrangement of the system Returns (state, energy, objectives, constraints,", "for obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True elif", "of front 1*/ current_front = [i for i in range(N) if dominated_by_counter[i] ==", "\"\"\" The crowding distance of the Pareto front \"front_id\" :param pop_obj: objective vectors", "self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals", "convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute factor for exponential cooling from Tmax", "the ranks max_rank: max rank \"\"\" N,M = pop_obj.shape # structures for holding", "Extending classes should round integers in the cost function if needed. 
cost_fun() is", "in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx] >", "+ 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self, parameter_values): \"\"\"", "!= fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): '''", "+= \\ ( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)] )", "of cross over and mutation for GA, and move for SA. Parameters are", "* mu[temp] + (1. - 2. * mu[temp]) * np.power(1. - norm, dis_m", "= np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return mate", "dV = 100*(value - prev_value) if dV > 0.0 and math.exp(-dV / T)", "= pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n", "if len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :],", "number of elements in the population :return: the initial population \"\"\" pop_dec =", "1)) - 1.) 
temp = site & (mu > 0.5) norm = (upper[temp]", "pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True", "pop_obj: objective vectors :param rank: front numbers :return: crowding distance \"\"\" n, M", "def cost_fun(self, x): \"\"\" calculate the objective and constraints vectors :param x: the", "for i in range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))]", ":], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in", "dominator: the index of the dominator, None if i and j are non-dominated", "for f in range(len(fronts)): front = np.array([k for k in range(len(rank)) if rank[k]", "steps = 10000, t_max = 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so):", "sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"]", "- then they are part of next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b)", "prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data best_value = prev_value =", "(1. - np.power( 2. * (1. - mu[temp]) + 2. * (mu[temp] -", "= prev_state, prev_obj, prev_cstr, prev_data, prev_value else: # Accept new state and compare", "2. * (mu[temp] - 0.5) * np.power(1. 
- norm, dis_m + 1.), 1.", "np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index],", "np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n", ")) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr))", "only #sorts half the population. extra elements are only from the in the", "algorithm \"\"\" if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else:", "implentations of Simulated Annealing and NSGA-II used in the paper. Created on Nov,", "(parent_1_dec - parent_2_dec) / 2, (parent_1_dec + parent_2_dec) / 2 - beta *", "+ 1.), 1. / (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return", "self.d :param pop_dec: decision vectors :return: \"\"\" dis_c = 10 dis_m = 20", "i, j if (not i_dominates_j) and j_dominates_i: return j, i return None, None", "in range(i+1,N): #constrained pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator,", "in NSGA-II :param population: current population :param n: number of selected individuals :return:", "np.random.random(): # Restore previous state state, obj, cstr, data, value = prev_state, prev_obj,", "dominated_by_counter): \"\"\" sets the ranks of the population elements using the fast non-dominated", "they are part of next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front =", "self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr def", "in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i in range(N): for 
j in", "j) if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: #", "ranks of the population elements using the fast non-dominated sorting method. :param pop_obj:", ":param pop_obj: population objectives (NxM) :param pop_cstr: population constraint violation (Nx1) :returns: ranks:", "1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental selection in", "max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop,", "minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\"", "sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"]", "evals, front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index)", "and j of the population. 
:param pop_obj: the value of the populations' objectives", "of the Pareto front \"front_id\" :param pop_obj: objective vectors :param rank: front numbers", "d, min, max): self.d = d self.upper = max self.lower = min def", "\"\"\" dis_c = 10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) // 2) *", "move(self, parameter_values): \"\"\" Inputs: - value - new device sizes self.values update is", "dominator, None if i and j are non-dominated dominated: the index of the", "simulated_annealing(problem, steps = 10000, t_max = 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj =", "convert_multi_obj(obj, cstr) dV = 100*(value - prev_value) if dV > 0.0 and math.exp(-dV", "lower) return offspring_dec def move(self, parameter_values): \"\"\" Inputs: - value - new device", "i)] - pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i] - fmin[i]) if fmax[i]", "population elements using the fast non-dominated sorting method. :param pop_obj: population objectives (NxM)", ": an initial arrangement of the system Returns (state, energy, objectives, constraints, data):", "len(index))) for i, v in zip(select, data): self.pop_data[i] = v return self.pop, self.pop_obj,", "from itertools import repeat from collections import Sequence class Problem(object): \"\"\" The problem", "= self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize the", "beta = beta * ((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n // 2,", "weigthing. objectives = [\"name\", ... ] parameters = [\"name\", ... 
] min =", "associated with the solutions but not used in the optimization # it can", "- prev_value) if dV > 0.0 and math.exp(-dV / T) < np.random.random(): #", "prev_value) if dV > 0.0 and math.exp(-dV / T) < np.random.random(): # Restore", "0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1", "once d != self.d :param pop_dec: decision vectors :return: \"\"\" dis_c = 10", "- lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (1. - np.power( 2.", "number of solutions to be compared :param N: number of solutions to be", "cdist) ''' # fast non-dominated sorting and crowding distance # arguably they could", "dominated_by_counter: counter of elements dominating (Nx1) :returns: ranks: an array with the ranks", "def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps = 10000,", "non-dominated sorting SA - Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy", "the fast non-dominated sorting method. 
:param dominate: list of dominated population elements [[]*N]", "d)) beta[mu <= 0.5] = np.power(2 * mu[mu <= 0.5], 1 / (dis_c", "observations array of concat [ values, measures] - reward +1 improved, -1 worsen,", "= np.random.random((n // 2, d)) beta[mu <= 0.5] = np.power(2 * mu[mu <=", "crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental selection in NSGA-II :param", "1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp])", "crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None):", "= [i for i in range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n", "optimization we will use this # data to store the simulation outputs data", "index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i,", "the decision vectors :return: the objective, constraints, and additional data vectors \"\"\" n", "elements using the fast non-dominated sorting method. 
:param pop_obj: population objectives (NxM) :param", "constraint optimization and fast non-dominated sorting SA - Addapted from <NAME>'s code from", "] \"\"\" def __init__(self, d, min, max): self.d = d self.upper = max", "Inputs: - value - new device sizes self.values update is self.value += self.value", "# fast non-dominated sorting and crowding distance # arguably they could be refractored", "this function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements", "self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals > 0: mating_pool = tournament(front_no, crowd_dis)", "index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament selection :param K: number of solutions", "Updated to handle constraint optimization and fast non-dominated sorting SA - Addapted from", "pop_data) def initialize(self, N): \"\"\" initialize the population :param N: number of elements", "!= self.d :param pop_dec: decision vectors :return: \"\"\" dis_c = 10 dis_m =", "and cdist) ''' # fast non-dominated sorting and crowding distance # arguably they", "the populations' objectives :param i, j: the elems being compared :returns: dominator: the", "values, measures] - reward +1 improved, -1 worsen, -1000 no sim, 1000 meet", "pop_obj[front, :].min(0) for i in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf", "use this # data to store the simulation outputs data = np.zeros((n, 1))", "steps: step += 1 T = t_max * math.exp(cooling_factor * step / steps)", "0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def", "+ 1.), 1. / (dis_m + 1)) - 1.) 
temp = site &", "parent_2_dec) / 2, (parent_1_dec + parent_2_dec) / 2 - beta * (parent_1_dec -", "problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if", "next generation population ( decison vars, objectives, constraints, data, rank, and cdist) '''", "self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v", "the energy of a system by simulated annealing. Parameters state : an initial", "10000, t_max = 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes", "i, j) if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]:", "population. :param pop_obj: the value of the populations' objectives :param i, j: the", "function. # In analog IC optimization we will use this # data to", "np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n // 2, d)) < 0.5] = 1", "+ self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals", "dominance dominator, dominated = objective_dominance(pop_obj, i, j) if dominator is not None: dominate[dominator].append(dominated)", "= 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] =", "the objective, constraints, and additional data vectors \"\"\" n = x.shape[0] obj =", ":param pop_obj: objective vectors :param rank: front numbers :return: crowding distance \"\"\" n,", "data, value if value < best_value: best_state, best_obj, best_cstr, best_data, best_value, = state,", "(dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self, parameter_values):", ":param x: the 
decision vectors :return: the objective, constraints, and additional data vectors", "dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and", "delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index], fronts[index], crowd_dis[index],index class", "\"\"\" N,M = pop_obj.shape # structures for holding the domination info required for", "mate def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance between elements i and", "variation operators as real values. Extending classes should round integers in the cost", "dominance between elements i and j of the population. :param pop_obj: the value", "N): \"\"\" initialize the population :param N: number of elements in the population", "2, :] parent_2_dec = pop_dec[n // 2:, :] beta = np.zeros((n // 2,", "rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]])", "obj, cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV = 100*(value -", "np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n = len(rank) index_a = np.arange(n)", "= np.zeros((n, 1)) cstr = np.zeros(n) # data associated with the solutions but", "self.lower)) + self.lower return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring", "(n, 1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] - lower[temp]) / (upper[temp] -", "np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ =", "population elements [[]*N] :param dominated_by_counter: counter of elements dominating (Nx1) :returns: ranks: an", 
"i_dominates_j) and j_dominates_i: return j, i return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\"", "== 0: next_front.append(index_b) current_front = next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank):", "= np.random.random((n, d)) < mutation mu = np.random.random((n, d)) temp = site &", "compare to best state prev_state, prev_obj, prev_cstr, prev_data, prev_value = state, obj, cstr,", "of solutions to be compared :param N: number of solutions to be selected", "mu[mu > 0.5], -1 / (dis_c + 1)) beta = beta * ((-1)**", "sizes self.values update is self.value += self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) -", "# arguably they could be refractored out of this function fronts, max_front =", "2)) site = np.random.random((n, d)) < mutation mu = np.random.random((n, d)) temp =", ":return: the initial population \"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper - self.lower))", "/ 2 - beta * (parent_1_dec - parent_2_dec) / 2)) site = np.random.random((n,", "and NSGA-II used in the paper. 
Created on Nov, 2020 @author: <NAME> <<EMAIL>>", "dominated_by_counter = np.zeros(N, dtype=int) for i in range(N): for j in range(i+1,N): #constrained", "len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select],", "zero.') # Note initial state if initial_state is None : best_state, best_obj, best_cstr,", "return_index =True) if len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :],", "100*(value - prev_value) if dV > 0.0 and math.exp(-dV / T) < np.random.random():", "initial_state is None : best_state, best_obj, best_cstr, best_data = problem.initialize(1) else: best_state, best_obj,", "selected :param fit: fitness vectors :return: index of selected solutions ''' n =", "https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import math from itertools import repeat from", "SA - Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np", "max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i in delta_n]) return", "M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts !=", "sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"]", "pop_dec = (np.random.random((N, self.d)) * (self.upper - self.lower)) + self.lower return pop_dec def", "N: number of solutions to be selected :param fit: fitness vectors :return: index", "i_dominates_j and (not j_dominates_i): return i, j if (not i_dominates_j) and j_dominates_i: return", "method. 
:param pop_obj: population objectives (NxM) :param pop_cstr: population constraint violation (Nx1) :returns:", "\"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper - self.lower)) + self.lower return pop_dec", "non-dominated sorting and crowding distance # arguably they could be refractored out of", "self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n = len(rank)", "defined for multi-objective multi-constraint optimization. Its up to the single objective optimizers to", "assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of the population elements using the fast", "= np.array([k for k in range(len(rank)) if rank[k] == fronts[f]]) fmax = pop_obj[front,", "should round integers in the cost function if needed. cost_fun() is defined for", "objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance between elements i and j of", "assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of the population elements", "sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in", "if (not i_dominates_j) and j_dominates_i: return j, i return None, None def fnd_sort(pop_obj,", "parent_2_dec = pop_dec[n // 2:, :] beta = np.zeros((n // 2, d)) mu", "ones in its set of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if(", "... ] max = [max, ... 
] \"\"\" def __init__(self, d, min, max):", "self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps", "gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)],", "Its up to the single objective optimizers to implement objective weigthing. objectives =", "step / steps) state, obj, cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr)", "decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return", "objective vectors :param rank: front numbers :return: crowding distance \"\"\" n, M =", "/ (dis_c + 1)) beta[mu > 0.5] = np.power(2 * mu[mu > 0.5],", "@author: <NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization", "- lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (np.power(2. * mu[temp] +", "= np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop,", "* np.power(1. - norm, dis_m + 1.), 1. / (dis_m + 1)) -", "1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i] - fmin[i]) if", "front \"front_id\" :param pop_obj: objective vectors :param rank: front numbers :return: crowding distance", "= site & (mu > 0.5) norm = (upper[temp] - offspring_dec[temp]) / (upper[temp]", "* (1. - mu[temp]) + 2. * (mu[temp] - 0.5) * np.power(1. 
-", "sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps = 10000, t_max = 1500.0, t_min =", "best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute", "pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data)", "{}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the objective and constraints vectors :param x:", "the solutions but not used in the optimization # it can be usefull", "/ 2, (parent_1_dec + parent_2_dec) / 2 - beta * (parent_1_dec - parent_2_dec)", "else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis", "in the last from index = [i for i in range(len(fronts)) if fronts[i]", "n = x.shape[0] obj = np.zeros((n, 1)) cstr = np.zeros(n) # data associated", "decision vectors :return: \"\"\" dis_c = 10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec)", "cross over and mutation for GA, and move for SA. Parameters are handled", "= np.zeros((n // 2, d)) mu = np.random.random((n // 2, d)) beta[mu <=", "Created on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated", "(parent_1_dec - parent_2_dec) / 2)) site = np.random.random((n, d)) < mutation mu =", "n, M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts", "half_tournemant(rank, cdist): n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] ==", "simulated annealing. 
Parameters state : an initial arrangement of the system Returns (state,", "beta[mu > 0.5] = np.power(2 * mu[mu > 0.5], -1 / (dis_c +", "elements in the population :return: the initial population \"\"\" pop_dec = (np.random.random((N, self.d))", "index = [i for i in range(len(fronts)) if fronts[i] < max_front] last =", "best_cstr, best_data, best_value, = state, obj, cstr, data, value # Return best state", "((fmax[i] - fmin[i]) if fmax[i] != fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec,", "# Attempt moves to new states while step < steps: step += 1", "exponential cooling from Tmax to Tmin cooling_factor = -math.log(t_max / t_min) # Attempt", "- new device sizes self.values update is self.value += self.value + action*(self.ranges[:,1] -", "arguably they could be refractored out of this function fronts, max_front = fnd_sort(pop_obj,", "reward +1 improved, -1 worsen, -1000 no sim, 1000 meet specs \"\"\" action", "== fronts[f]]) fmax = pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for i in", "np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a", "<= 0.5], 1 / (dis_c + 1)) beta[mu > 0.5] = np.power(2 *", "pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for i in range(M): sorted_index = np.argsort(pop_obj[front,", ":param dominate: list of dominated population elements [[]*N] :param dominated_by_counter: counter of elements", "pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator, dominated = objective_dominance(pop_obj, i, j) if", "0: next_front.append(index_b) current_front = next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\"", "np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values", "fronts[fronts != np.inf] for f in 
range(len(fronts)): front = np.array([k for k in", "prev_obj, prev_cstr, prev_data, prev_value else: # Accept new state and compare to best", "= 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] =", "(dis_m + 1)) - 1.) temp = site & (mu > 0.5) norm", "self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values", "non dominated - then they are part of next front) if dominated_by_counter[index_b] ==", "handled in variation operators as real values. Extending classes should round integers in", "objectives = [\"name\", ... ] parameters = [\"name\", ... ] min = [min,", "= crowding_distance(pop_obj, fronts) #Select elements from all fronts except the last. Note that", "that fnd_sort only #sorts half the population. extra elements are only from the", "range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]], i)] -", "j if (not i_dominates_j) and j_dominates_i: return j, i return None, None def", "dominated = objective_dominance(pop_obj, i, j) if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif", "operators of cross over and mutation for GA, and move for SA. Parameters", "the ones in its set of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 #", "> 0.0 and math.exp(-dV / T) < np.random.random(): # Restore previous state state,", "/ (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self,", "0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1", "0.5) * np.power(1. - norm, dis_m + 1.), 1. 
/ (dis_m + 1.)))", "greater than zero.') # Note initial state if initial_state is None : best_state,", "could be refractored out of this function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis", "In analog IC optimization we will use this # data to store the", "= pop_obj.shape i_dominates_j = False j_dominates_i = False for obj_idx in range(M): if", "objective optimizers to implement objective weigthing. objectives = [\"name\", ... ] parameters =", "(np.power(2. * mu[temp] + (1. - 2. * mu[temp]) * np.power(1. - norm,", "new device sizes self.values update is self.value += self.value + action*(self.ranges[:,1] - self.ranges[:,", "dominated_by_counter[index_b]-=1 # if( they become non dominated - then they are part of", "Note initial state if initial_state is None : best_state, best_obj, best_cstr, best_data =", "data, value # Return best state and energy return best_state, best_value if __name__", "0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd',", "not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j dominates i dominate[j].append(i)", "to best state prev_state, prev_obj, prev_cstr, prev_data, prev_value = state, obj, cstr, data,", "# data to store the simulation outputs data = np.zeros((n, 1)) return obj,", "the ranks of the population elements using the fast non-dominated sorting method. 
:param", "None if i and j are non-dominated dominated: the index of the dominated,", "max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from all fronts", "= 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem(", "np.random.random((n, d)) < mutation mu = np.random.random((n, d)) temp = site & (mu", "site = np.random.random((n, d)) < mutation mu = np.random.random((n, d)) temp = site", "self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop,", "def crowding_distance(pop_obj, rank): \"\"\" The crowding distance of the Pareto front \"front_id\" :param", "classes should round integers in the cost function if needed. cost_fun() is defined", "VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa = SA()", "= np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index)))", ":param rank: front numbers :return: crowding distance \"\"\" n, M = np.shape(pop_obj) crowd_dis", "IC optimization we will use this # data to store the simulation outputs", "is self.value += self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation, reward,", "/ steps) state, obj, cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV", "numpy as np import math from itertools import repeat from collections import Sequence", "= problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV = 100*(value - prev_value) if dV", ":] beta = np.zeros((n // 2, d)) mu = np.random.random((n // 2, d))", "constraints, data, rank, and cdist) ''' # fast non-dominated sorting and crowding 
distance", "mu[temp]) + 2. * (mu[temp] - 0.5) * np.power(1. - norm, dis_m +", "fronts except the last. Note that fnd_sort only #sorts half the population. extra", "if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator, dominated = objective_dominance(pop_obj, i, j)", "dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return", "array of concat [ values, measures] - reward +1 improved, -1 worsen, -1000", "from Tmax to Tmin cooling_factor = -math.log(t_max / t_min) # Attempt moves to", "1)) cstr = np.zeros(n) # data associated with the solutions but not used", "evals > 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual(", "tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation,", "v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) +", "violation (Nx1) :returns: ranks: an array with the ranks max_rank: max rank \"\"\"", "def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of the population elements using the", "range(i+1,N): #constrained pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator, dominated", "v in zip(select, data): self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def", "np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] +=", "move for SA. Parameters are handled in variation operators as real values. 
Extending", "https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and fast non-dominated sorting SA - Addapted", "delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i in delta_n]) return pop_dec[index,:],", "2, (parent_1_dec + parent_2_dec) / 2 - beta * (parent_1_dec - parent_2_dec) /", "t_max * math.exp(cooling_factor * step / steps) state, obj, cstr, data = problem.individual(problem.move(state))", ":param pop_cstr: population constraint violation (Nx1) :returns: ranks: an array with the ranks", "// 2, d))) beta[np.random.random((n // 2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n //", "return offspring_dec def move(self, parameter_values): \"\"\" Inputs: - value - new device sizes", "= problem.initialize(1) else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state = best_state prev_state,", "elements dominating (Nx1) :returns: ranks: an array with the ranks max_rank: max rank", "mu = np.random.random((n // 2, d)) beta[mu <= 0.5] = np.power(2 * mu[mu", "prev_value else: # Accept new state and compare to best state prev_state, prev_obj,", "if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j", ":return: the objective, constraints, and additional data vectors \"\"\" n = x.shape[0] obj", "state if initial_state is None : best_state, best_obj, best_cstr, best_data = problem.initialize(1) else:", "''' # fast non-dominated sorting and crowding distance # arguably they could be", "objectives (NxM) :param pop_cstr: population constraint violation (Nx1) :returns: ranks: an array with", "- pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals, index", "min = [min, ... ] max = [max, ... 
] \"\"\" def __init__(self,", "#remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index) < self.pop.shape[0]", "pop_dec once d != self.d :param pop_dec: decision vectors :return: \"\"\" dis_c =", "pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals :param boundary: lower", "cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]]", "an array with the ranks max_rank: max rank \"\"\" N = len(dominate) ranks", "lower and upper boundary of pop_dec once d != self.d :param pop_dec: decision", "if i and j are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j = False", "0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1", "self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size)", "np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for", "j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]],", "pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop", "the paper. 
Created on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA - Adapted from", "return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals :param boundary:", "objective, constraints, and additional data vectors \"\"\" n = x.shape[0] obj = np.zeros((n,", "\"\"\" calculate the objective and constraints vectors :param x: the decision vectors :return:", "2, d)) mu = np.random.random((n // 2, d)) beta[mu <= 0.5] = np.power(2", "\\ (1. - np.power( 2. * (1. - mu[temp]) + 2. * (mu[temp]", "compared :returns: dominator: the index of the dominator, None if i and j", "constraints, data): the best state and energy found. ''' if t_min <= 0.0:", "i and j of the population. :param pop_obj: the value of the populations'", "self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True)", "j_dominates_i = True if i_dominates_j and (not j_dominates_i): return i, j if (not", "fronts[f]]) fmax = pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for i in range(M):", "initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the energy of a system by simulated", "0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05", "0.0 and math.exp(-dV / T) < np.random.random(): # Restore previous state state, obj,", "prev_obj, prev_cstr, prev_data, prev_value = state, obj, cstr, data, value if value <", "\\ ( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)] ) /", "pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j and (not j_dominates_i): return i, j", "return j, i return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets", "prev_value = state, obj, cstr, data, value if value < best_value: best_state, best_obj,", "dominated_by_counter[j]+=1 #assign the ranks and return return assign_rank(dominate, 
dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\"", "np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05", "+ 1)) - 1.) temp = site & (mu > 0.5) norm =", "= VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa =", "Returns (state, energy, objectives, constraints, data): the best state and energy found. '''", "def tournament(rank, cdist): ''' tournament selection :param K: number of solutions to be", "reward, done, {} - observations array of concat [ values, measures] - reward", "cdist) mate[1::2] = half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i, j): \"\"\" Computes", "distance \"\"\" n, M = np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts", "i, j: the elems being compared :returns: dominator: the index of the dominator,", "data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV = 100*(value - prev_value) if", "parent_2_dec) / 2 - beta * (parent_1_dec - parent_2_dec) / 2)) site =", "upper boundary of pop_dec once d != self.d :param pop_dec: decision vectors :return:", "if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i in", "steps) state, obj, cstr, data = problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV =", "fmin = pop_obj[front, :].min(0) for i in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]]", "mu[temp] + (1. - 2. * mu[temp]) * np.power(1. 
- norm, dis_m +", "the system Returns (state, energy, objectives, constraints, data): the best state and energy", "value = prev_state, prev_obj, prev_cstr, prev_data, prev_value else: # Accept new state and", "''' Environmental selection in NSGA-II :param population: current population :param n: number of", "#Select elements from all fronts except the last. Note that fnd_sort only #sorts", "1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa = SA() print(circuit) for", "rank \"\"\" N = len(dominate) ranks = np.inf * np.ones(N) current_rank = 1", "__name__ == '__main__': seed = 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05", "dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj,", "== '__main__': seed = 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"]", "return obj, cstr, data def individual(self, pop_vars): \"\"\" turn decision vectors into individuals", "cdist): ''' tournament selection :param K: number of solutions to be compared :param", "solutions to be compared :param N: number of solutions to be selected :param", "np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp]", "= 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] =", "optimization. Its up to the single objective optimizers to implement objective weigthing. 
objectives", "+ action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation, reward, done, {} - observations", "(Nx1) :returns: ranks: an array with the ranks max_rank: max rank \"\"\" N,M", "= np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta * (parent_1_dec - parent_2_dec) /", "* np.ones(N) current_rank = 1 # if non dominated is part of front", "new states while step < steps: step += 1 T = t_max *", "(not i_dominates_j) and j_dominates_i: return j, i return None, None def fnd_sort(pop_obj, pop_cstr):", "1500.0, t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the energy of", "evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals,", "the ranks max_rank: max rank \"\"\" N = len(dominate) ranks = np.inf *", "# Return best state and energy return best_state, best_value if __name__ == '__main__':", "range(len(fronts)): front = np.array([k for k in range(len(rank)) if rank[k] == fronts[f]]) fmax", "annealing. Parameters state : an initial arrangement of the system Returns (state, energy,", "- Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and fast non-dominated sorting", "max self.lower = min def __str__(self): return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\"", "sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc':", "= x.shape[0] obj = np.zeros((n, 1)) cstr = np.zeros(n) # data associated with", "= 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] =", "- value - new device sizes self.values update is self.value += self.value +", "the best state and energy found. 
''' if t_min <= 0.0: raise ValueError('Exponential", "Precompute factor for exponential cooling from Tmax to Tmin cooling_factor = -math.log(t_max /", "self.pop_cstr, self.pop_data, evals, front_no while evals > 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec,", "'__main__': seed = 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] =", "90.0},gt=gt), discrete_actions = False) sa = SA() print(circuit) for iter, stats in sa.minimize(circuit):", "'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions", "0] while np.sum(ranks < np.inf) < N/2: ranks[current_front] = current_rank next_front = []", "1 beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1, d))] = 1 offspring_dec =", "range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i", "sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"]", "(parent_1_dec + parent_2_dec) / 2 - beta * (parent_1_dec - parent_2_dec) / 2))", "default_mo_2_so): ''' Minimizes the energy of a system by simulated annealing. Parameters state", "the population elements using the fast non-dominated sorting method. :param pop_obj: population objectives", "action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation, reward, done, {} - observations array", "step += 1 T = t_max * math.exp(cooling_factor * step / steps) state,", "= False j_dominates_i = False for obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx]", "Annealing and NSGA-II used in the paper. 
Created on Nov, 2020 @author: <NAME>", "i_dominates_j = False j_dominates_i = False for obj_idx in range(M): if pop_obj[i,obj_idx] <", ":].max(0) fmin = pop_obj[front, :].min(0) for i in range(M): sorted_index = np.argsort(pop_obj[front, i])", "are handled in variation operators as real values. Extending classes should round integers", "self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj,", "self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front", "j of the population. :param pop_obj: the value of the populations' objectives :param", "< mutation mu = np.random.random((n, d)) temp = site & (mu <= 0.5)", "fast non-dominated sorting SA - Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import", "pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index], fronts[index], crowd_dis[index],index class NSGA2: def", "initial state if initial_state is None : best_state, best_obj, best_cstr, best_data = problem.initialize(1)", "i and j are non-dominated dominated: the index of the dominated, None if", "i, j): \"\"\" Computes objective-wise dominance between elements i and j of the", "prev_data, prev_value else: # Accept new state and compare to best state prev_state,", "> pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j and (not j_dominates_i): return i,", "-1 worsen, -1000 no sim, 1000 meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values))", "mu = np.random.random((n, d)) temp = site & (mu <= 0.5) lower, upper", "return parameter_values def half_tournemant(rank, cdist): n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank", "while step < steps: step += 1 T = t_max * 
math.exp(cooling_factor *", "dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return return assign_rank(dominate, dominated_by_counter) def", "system by simulated annealing. Parameters state : an initial arrangement of the system", ":param boundary: lower and upper boundary of pop_dec once d != self.d :param", "self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj,", "= pop_obj[front, :].max(0) fmin = pop_obj[front, :].min(0) for i in range(M): sorted_index =", "decision vectors into individuals :param pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\"", "False j_dominates_i = False for obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] :", "np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data", "/ (dis_m + 1)) - 1.) temp = site & (mu > 0.5)", "pop_cstr[i] < pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates", "GA, and move for SA. Parameters are handled in variation operators as real", "... ] min = [min, ... ] max = [max, ... ] \"\"\"", "multi-objective multi-constraint optimization. Its up to the single objective optimizers to implement objective", "decision vectors :return: the objective, constraints, and additional data vectors \"\"\" n =", "optimization # it can be usefull to debug the cost function. # In", "math from itertools import repeat from collections import Sequence class Problem(object): \"\"\" The", "a system by simulated annealing. 
Parameters state : an initial arrangement of the", "1)) return obj, cstr, data def individual(self, pop_vars): \"\"\" turn decision vectors into", "t_max = 1500.0, t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the", "self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front =", "but not used in the optimization # it can be usefull to debug", "in current_front: # reduce the numbers of domination to the ones in its", "the population elements using the fast non-dominated sorting method. :param dominate: list of", "45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt),", "(n - len(index))] index.extend([last[i] for i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i]", "= 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] =", "= 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0,", "= default_mo_2_so): ''' Minimizes the energy of a system by simulated annealing. Parameters", "pop_obj.shape # structures for holding the domination info required for fast nd sorting", "self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index", "is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data", "self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no,", "by simulated annealing. 
Parameters state : an initial arrangement of the system Returns", "\"\"\" Inputs: - value - new device sizes self.values update is self.value +=", "( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i]", "\"\"\" initialize the population :param N: number of elements in the population :return:", "0.5] = 1 beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1, d))] = 1", "t_min) # Attempt moves to new states while step < steps: step +=", "np.unique(rank) fronts = fronts[fronts != np.inf] for f in range(len(fronts)): front = np.array([k", "range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j", "beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec", "tournament selection :param K: number of solutions to be compared :param N: number", "population :param n: number of selected individuals :return: next generation population ( decison", "T = t_max * math.exp(cooling_factor * step / steps) state, obj, cstr, data", "- mu[temp]) + 2. * (mu[temp] - 0.5) * np.power(1. 
- norm, dis_m", ":return: index of selected solutions ''' n = len(rank) mate = np.zeros(n, dtype=np.int16)", "1)) > crossover, (1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) /", "best_obj, best_cstr, best_data = problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr, prev_data =", ":].min(0) for i in range(M): sorted_index = np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]]", "the dominator, None if i and j are non-dominated dominated: the index of", "sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"]", "to be compared :param N: number of solutions to be selected :param fit:", "multi-constraint optimization. Its up to the single objective optimizers to implement objective weigthing.", "self.pop_data, evals, front_no while evals > 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj,", "worsen, -1000 no sim, 1000 meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values", "of next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front current_rank+=1 return", "== 0] while np.sum(ranks < np.inf) < N/2: ranks[current_front] = current_rank next_front =", "concat [ values, measures] - reward +1 improved, -1 worsen, -1000 no sim,", "self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover )) self.pop =", "dis_m + 1.), 1. / (dis_m + 1)) - 1.) 
temp = site", "sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"]", "(dis_c + 1)) beta = beta * ((-1)** np.random.randint(2, size=(n // 2, d)))", "+ 1]], i)] - pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i] - fmin[i])", "- 0.5) * np.power(1. - norm, dis_m + 1.), 1. / (dis_m +", "simulation outputs data = np.zeros((n, 1)) return obj, cstr, data def individual(self, pop_vars):", "state and energy found. ''' if t_min <= 0.0: raise ValueError('Exponential cooling requires", "sorting SA - Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as", "is part of front 1*/ current_front = [i for i in range(N) if", "half the population. extra elements are only from the in the last from", "mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation", ": return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps = 10000, t_max = 1500.0,", "pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance dominator, dominated = objective_dominance(pop_obj,", "action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values", "seed = 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05", "= objective_dominance(pop_obj, i, j) if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1 elif pop_cstr[i]", "vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars,", "the simulation outputs data = np.zeros((n, 1)) return obj, cstr, data def individual(self,", "= 
np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts != np.inf] for f in", "<= 0.5] = np.power(2 * mu[mu <= 0.5], 1 / (dis_c + 1))", "= current_rank next_front = [] for index_a in current_front: # reduce the numbers", "parent_1_dec = pop_dec[:n // 2, :] parent_2_dec = pop_dec[n // 2:, :] beta", "max_front] last = [i for i in range(len(fronts)) if fronts[i]== max_front] delta_n =", "pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars)", "offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self, parameter_values): \"\"\" Inputs: -", "T) < np.random.random(): # Restore previous state state, obj, cstr, data, value =", "prev_state, prev_obj, prev_cstr, prev_data, prev_value else: # Accept new state and compare to", "population: current population :param n: number of selected individuals :return: next generation population", "the cost function. # In analog IC optimization we will use this #", "current_front: # reduce the numbers of domination to the ones in its set", "return return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of the", "best state prev_state, prev_obj, prev_cstr, prev_data, prev_value = state, obj, cstr, data, value", "i and j are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j = False j_dominates_i", "pop_obj.shape i_dominates_j = False j_dominates_i = False for obj_idx in range(M): if pop_obj[i,obj_idx]", "not used in the optimization # it can be usefull to debug the", "# it can be usefull to debug the cost function. # In analog", "in the optimization # it can be usefull to debug the cost function.", "to the single objective optimizers to implement objective weigthing. 
objectives = [\"name\", ...", "= tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation =", "self.pop_cstr, self.pop_data = problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no)", "the last from index = [i for i in range(len(fronts)) if fronts[i] <", "temperature greater than zero.') # Note initial state if initial_state is None :", "* (mu[temp] - 0.5) * np.power(1. - norm, dis_m + 1.), 1. /", "= [max, ... ] \"\"\" def __init__(self, d, min, max): self.d = d", "<<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and fast", "else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return return", "NSGA-II algorithm \"\"\" if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size))", "0.5) lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp]", "as real values. Extending classes should round integers in the cost function if", "beta[mu <= 0.5] = np.power(2 * mu[mu <= 0.5], 1 / (dis_c +", "- lower[temp]) * \\ (1. - np.power( 2. * (1. - mu[temp]) +", "False for obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True", "front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front current_rank+=1 return ranks, current_rank-1", "- norm, dis_m + 1.), 1. / (dis_m + 1)) - 1.) 
temp", "crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while", "ranks[current_front] = current_rank next_front = [] for index_a in current_front: # reduce the", "( decison vars, objectives, constraints, data, rank, and cdist) ''' # fast non-dominated", "duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index) < self.pop.shape[0] :", "for j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j +", "None if i and j are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j =", "f in range(len(fronts)): front = np.array([k for k in range(len(rank)) if rank[k] ==", "#assign the ranks and return return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets", "The crowding distance of the Pareto front \"front_id\" :param pop_obj: objective vectors :param", "2. * mu[temp]) * np.power(1. - norm, dis_m + 1.), 1. 
/ (dis_m", "0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05", "None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the ranks of the", "parameters and variation operators of cross over and mutation for GA, and move", "x): \"\"\" calculate the objective and constraints vectors :param x: the decision vectors", "= crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no", "front = np.array([k for k in range(len(rank)) if rank[k] == fronts[f]]) fmax =", "- fmin[i]) if fmax[i] != fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj,", "self.value += self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation, reward, done,", ": 90.0},gt=gt), discrete_actions = False) sa = SA() print(circuit) for iter, stats in", "(upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (1. - np.power(", "= 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] =", "and crowding distance # arguably they could be refractored out of this function", "crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1, len(front) - 1):", ":param N: number of elements in the population :return: the initial population \"\"\"", "prev_data, prev_value = state, obj, cstr, data, value if value < best_value: best_state,", "mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals :param boundary: lower and upper boundary of", "repeat from collections import Sequence class Problem(object): \"\"\" The problem related parameters and", "vars, objectives, constraints, data, rank, and cdist) ''' # fast non-dominated sorting and", "fnd_sort only #sorts half the population. 
extra elements are only from the in", "[\"name\", ... ] min = [min, ... ] max = [max, ... ]", "used in the paper. Created on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA -", "= convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute factor for exponential cooling from", "distance of the Pareto front \"front_id\" :param pop_obj: objective vectors :param rank: front", "1*/ current_front = [i for i in range(N) if dominated_by_counter[i] == 0] while", "dis_c = 10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:]", "= {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] =", "current_front = [i for i in range(N) if dominated_by_counter[i] == 0] while np.sum(ranks", "environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr,", "meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] -", "dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter):", "np.inf) < N/2: ranks[current_front] = current_rank next_front = [] for index_a in current_front:", "gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False)", "\"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the objective and constraints vectors :param", "np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return mate def", "from index = [i for i in range(len(fronts)) if fronts[i] < max_front] last", "self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr def 
simulated_annealing(problem,", "fit: fitness vectors :return: index of selected solutions ''' n = len(rank) mate", "cooling requires a minimum temperature greater than zero.') # Note initial state if", ": i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True if", "= fronts[fronts != np.inf] for f in range(len(fronts)): front = np.array([k for k", "cstr def simulated_annealing(problem, steps = 10000, t_max = 1500.0, t_min = 2.5, initial_state=None,", "found. ''' if t_min <= 0.0: raise ValueError('Exponential cooling requires a minimum temperature", "front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations yield", "''' This is the implentations of Simulated Annealing and NSGA-II used in the", "evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals >", "x: the decision vectors :return: the objective, constraints, and additional data vectors \"\"\"", "and sets the ranks of the population elements using the fast non-dominated sorting", "best_value if __name__ == '__main__': seed = 17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"]", "\"\"\" if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop,", "between elements i and j of the population. :param pop_obj: the value of", "j in range(i+1,N): #constrained pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective pareto dominance", "NSGA-II used in the paper. 
Created on Nov, 2020 @author: <NAME> <<EMAIL>> NSGA", "sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"]", "will use this # data to store the simulation outputs data = np.zeros((n,", "norm = (offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] -", "fronts[i] < max_front] last = [i for i in range(len(fronts)) if fronts[i]== max_front]", "if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data = problem.individual(problem.initialize(pop_size)) else: self.pop, self.pop_obj,", "(1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta", "prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj,", "max_rank: max rank \"\"\" N,M = pop_obj.shape # structures for holding the domination", "self.pop_obj[~select, :], self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in zip(select,", "vectors into individuals :param pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj,", "j: the elems being compared :returns: dominator: the index of the dominator, None", "\"\"\" Computes and sets the ranks of the population elements using the fast", "problem.individual(problem.move(state)) value = convert_multi_obj(obj, cstr) dV = 100*(value - prev_value) if dV >", "constraints vectors :param x: the decision vectors :return: the objective, constraints, and additional", "front_no #remove duplicates vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index) <", "crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr,", "of concat [ values, measures] - reward +1 improved, -1 worsen, -1000 no", "to be selected :param 
fit: fitness vectors :return: index of selected solutions '''", "__init__(self, d, min, max): self.d = d self.upper = max self.lower = min", "np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament", "i, v in zip(select, data): self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data", "vectors :return: index of selected solutions ''' n = len(rank) mate = np.zeros(n,", "pop_obj, pop_cstr, pop_data, n): ''' Environmental selection in NSGA-II :param population: current population", "and return return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of", "an array with the ranks max_rank: max rank \"\"\" N,M = pop_obj.shape #", "# if non dominated is part of front 1*/ current_front = [i for", "np.power(2 * mu[mu <= 0.5], 1 / (dis_c + 1)) beta[mu > 0.5]", "the implentations of Simulated Annealing and NSGA-II used in the paper. Created on", "vals, index = np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index) < self.pop.shape[0] : select", "min, max): self.d = d self.upper = max self.lower = min def __str__(self):", "dominated - then they are part of next front) if dominated_by_counter[index_b] == 0:", "else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental selection", "improved, -1 worsen, -1000 no sim, 1000 meet specs \"\"\" action = np.random.normal(scale=0.1,", "norm, dis_m + 1.), 1. / (dis_m + 1)) - 1.) temp =", "rank, and cdist) ''' # fast non-dominated sorting and crowding distance # arguably", "elements from all fronts except the last. 
Note that fnd_sort only #sorts half", "rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]]", "= np.argsort(-crowd_dis[last])[: (n - len(index))] index.extend([last[i] for i in delta_n]) return pop_dec[index,:], pop_obj[index,:],", "self.pop_cstr[~select], data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in zip(select, data): self.pop_data[i]", "of elements dominating (Nx1) :returns: ranks: an array with the ranks max_rank: max", "d self.upper = max self.lower = min def __str__(self): return \"Target: {}\".format(self.target) def", "best_state, best_obj, best_cstr, best_data = problem.initialize(1) else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state)", ":param N: number of solutions to be selected :param fit: fitness vectors :return:", "current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The crowding distance of the", "best_state prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data best_value = prev_value", "0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit", "pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize", "info required for fast nd sorting dominate = [[] for x in range(N)]", "=True) if len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index) self.pop[~select, :], self.pop_obj[~select,", "yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals > 0: mating_pool =", "n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist", "self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, 
self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data +", "[[] for x in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i in range(N):", "pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals :param boundary: lower and upper boundary", "def minimize(self, problem, pop_size=100, evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm", "and j are non-dominated dominated: the index of the dominated, None if i", "i)] ) / ((fmax[i] - fmin[i]) if fmax[i] != fmin[i] else 1.0) return", "best state and energy return best_state, best_value if __name__ == '__main__': seed =", "+ parent_2_dec) / 2 + beta * (parent_1_dec - parent_2_dec) / 2, (parent_1_dec", "1 T = t_max * math.exp(cooling_factor * step / steps) state, obj, cstr,", "self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, pop_data) def initialize(self, N): \"\"\" initialize the population", "import numpy as np import math from itertools import repeat from collections import", "t_min = 2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the energy of a", "/ 2 + beta * (parent_1_dec - parent_2_dec) / 2, (parent_1_dec + parent_2_dec)", "<NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import math from itertools", "current_rank next_front = [] for index_a in current_front: # reduce the numbers of", "= np.zeros((n, 1)) return obj, cstr, data def individual(self, pop_vars): \"\"\" turn decision", "and mutation for GA, and move for SA. Parameters are handled in variation", "turn decision vectors into individuals :param pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr)", "(upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (np.power(2. 
* mu[temp]", "in index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100 * 500,", "fast non-dominated sorting method. :param dominate: list of dominated population elements [[]*N] :param", "20 pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d) = np.shape(pop_dec) parent_1_dec", "max rank \"\"\" N,M = pop_obj.shape # structures for holding the domination info", ":], mutation = mutation, crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj =", "the optimization # it can be usefull to debug the cost function. #", "= np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data =", "population :return: the initial population \"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper -", "[pop_data[i] for i in index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100,", "= best_state prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data best_value =", "return mate def objective_dominance(pop_obj, i, j): \"\"\" Computes objective-wise dominance between elements i", "non dominated is part of front 1*/ current_front = [i for i in", "the population. :param pop_obj: the value of the populations' objectives :param i, j:", "if i_dominates_j and (not j_dominates_i): return i, j if (not i_dominates_j) and j_dominates_i:", "k in range(len(rank)) if rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0) fmin =", ":] parent_2_dec = pop_dec[n // 2:, :] beta = np.zeros((n // 2, d))", "the fast non-dominated sorting method. 
:param pop_obj: population objectives (NxM) :param pop_cstr: population", "= convert_multi_obj(obj, cstr) dV = 100*(value - prev_value) if dV > 0.0 and", "1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j - 1]],", "cstr = np.zeros(n) # data associated with the solutions but not used in", "pop_obj: the value of the populations' objectives :param i, j: the elems being", "else: # Accept new state and compare to best state prev_state, prev_obj, prev_cstr,", "(upper[temp] - lower[temp]) * \\ (1. - np.power( 2. * (1. - mu[temp])", "< max_front] last = [i for i in range(len(fronts)) if fronts[i]== max_front] delta_n", "front_no while evals > 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data", "array with the ranks max_rank: max rank \"\"\" N = len(dominate) ranks =", "[min, ... ] max = [max, ... ] \"\"\" def __init__(self, d, min,", "of the population. :param pop_obj: the value of the populations' objectives :param i,", "evaluations=100 * 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop is", "elems being compared :returns: dominator: the index of the dominator, None if i", "def __str__(self): return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the objective and", "population \"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper - self.lower)) + self.lower return", "mutation = mutation, crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj,", "objective and constraints vectors :param x: the decision vectors :return: the objective, constraints,", "0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05", "be usefull to debug the cost function. 
# In analog IC optimization we", "(pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr,", "best state and energy found. ''' if t_min <= 0.0: raise ValueError('Exponential cooling", "== rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] <", "dis_m + 1.), 1. / (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower)", "np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1, len(front)", "# In analog IC optimization we will use this # data to store", "for exponential cooling from Tmax to Tmin cooling_factor = -math.log(t_max / t_min) #", "= np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :] parent_2_dec = pop_dec[n // 2:,", "len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]]", "collections import Sequence class Problem(object): \"\"\" The problem related parameters and variation operators", "\"\"\" Computes objective-wise dominance between elements i and j of the population. :param", "= problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals =", "update is self.value += self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation,", "lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (np.power(2. 
* mu[temp] + (1.", "lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa = SA() print(circuit) for iter,", "boundary: lower and upper boundary of pop_dec once d != self.d :param pop_dec:", "for i in range(N): for j in range(i+1,N): #constrained pareto dominance if pop_cstr[i]", "Computes objective-wise dominance between elements i and j of the population. :param pop_obj:", "fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from all fronts except the", "ranks: an array with the ranks max_rank: max rank \"\"\" N = len(dominate)", "being compared :returns: dominator: the index of the dominator, None if i and", "- pop_obj[(front[sorted_index[j - 1]], i)] ) / ((fmax[i] - fmin[i]) if fmax[i] !=", "== pop_cstr[j]: #objective pareto dominance dominator, dominated = objective_dominance(pop_obj, i, j) if dominator", "problem.individual(initial_pop) front_no, max_front = fnd_sort(self.pop_obj, self.pop_cstr) crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations", "integers in the cost function if needed. cost_fun() is defined for multi-objective multi-constraint", "round integers in the cost function if needed. cost_fun() is defined for multi-objective", "list of dominated population elements [[]*N] :param dominated_by_counter: counter of elements dominating (Nx1)", "2:, :] beta = np.zeros((n // 2, d)) mu = np.random.random((n // 2,", "# Note initial state if initial_state is None : best_state, best_obj, best_cstr, best_data", "over and mutation for GA, and move for SA. Parameters are handled in", "if fronts[i] < max_front] last = [i for i in range(len(fronts)) if fronts[i]==", "Environmental selection in NSGA-II :param population: current population :param n: number of selected", "sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"]", "operators as real values. 
Extending classes should round integers in the cost function", "d)) < 0.5] = 1 beta[np.tile(np.random.random((n // 2, 1)) > crossover, (1, d))]", "elif pop_cstr[i] < pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i", "This is the implentations of Simulated Annealing and NSGA-II used in the paper.", "* \\ (1. - np.power( 2. * (1. - mu[temp]) + 2. *", "site & (mu > 0.5) norm = (upper[temp] - offspring_dec[temp]) / (upper[temp] -", "site & (mu <= 0.5) lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n,", "be refractored out of this function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis =", "mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop is None: self.pop, self.pop_obj,", "dominator, dominated = objective_dominance(pop_obj, i, j) if dominator is not None: dominate[dominator].append(dominated) dominated_by_counter[dominated]+=1", "sorting method. :param dominate: list of dominated population elements [[]*N] :param dominated_by_counter: counter", "+= self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation, reward, done, {}", "and math.exp(-dV / T) < np.random.random(): # Restore previous state state, obj, cstr,", "math.exp(cooling_factor * step / steps) state, obj, cstr, data = problem.individual(problem.move(state)) value =", "pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from all fronts except the last.", "pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self,", "population objectives (NxM) :param pop_cstr: population constraint violation (Nx1) :returns: ranks: an array", "crossover=0.6): \"\"\" Generate offspring individuals :param boundary: lower and upper boundary of pop_dec", "of selected solutions ''' n = len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] =", "= np.unique(self.pop.round(decimals=9), axis=0, 
return_index =True) if len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]),", ":param K: number of solutions to be compared :param N: number of solutions", "specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:,", "np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] - lower[temp]) / (upper[temp]", "index.extend([last[i] for i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in", "= self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data,", "elements i and j of the population. :param pop_obj: the value of the", "in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]], i)]", "- 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j + 1]], i)] - pop_obj[(front[sorted_index[j -", "/ 2)) site = np.random.random((n, d)) < mutation mu = np.random.random((n, d)) temp", "< best_value: best_state, best_obj, best_cstr, best_data, best_value, = state, obj, cstr, data, value", "cstr, data, value = prev_state, prev_obj, prev_cstr, prev_data, prev_value else: # Accept new", "solutions to be selected :param fit: fitness vectors :return: index of selected solutions", "parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2]", "= np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist): n = len(rank) index_a =", "lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (np.power(2.", "pop_cstr, pop_data = self.cost_fun(pop_vars) return (pop_vars, pop_obj, pop_cstr, 
pop_data) def initialize(self, N): \"\"\"", "of the dominated, None if i and j are non-dominated \"\"\" _,M =", "i in range(len(fronts)) if fronts[i] < max_front] last = [i for i in", "sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' :", "2. * (1. - mu[temp]) + 2. * (mu[temp] - 0.5) * np.power(1.", "fronts = np.unique(rank) fronts = fronts[fronts != np.inf] for f in range(len(fronts)): front", "size=len(parameter_values)) parameter_values = parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values /", "moves to new states while step < steps: step += 1 T =", "temp = site & (mu > 0.5) norm = (upper[temp] - offspring_dec[temp]) /", "pop_cstr[j]: #objective pareto dominance dominator, dominated = objective_dominance(pop_obj, i, j) if dominator is", "cooling from Tmax to Tmin cooling_factor = -math.log(t_max / t_min) # Attempt moves", "value if value < best_value: best_state, best_obj, best_cstr, best_data, best_value, = state, obj,", "default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps = 10000, t_max", "return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr", "crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size yield self.pop,", "d != self.d :param pop_dec: decision vectors :return: \"\"\" dis_c = 10 dis_m", "raise ValueError('Exponential cooling requires a minimum temperature greater than zero.') # Note initial", "sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit =", "be selected :param fit: fitness vectors :return: index of selected solutions ''' n", "next_front = [] for index_a in current_front: # reduce the numbers of domination", "+ 1)) beta = beta * 
((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n", "front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size yield", "0.05 sat_conditions[\"vov_mnm8\"] = 0.05 sat_conditions[\"vov_mnm9\"] = 0.05 sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05", "best_state, best_obj, best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step = 0", "= pop_dec[:n // 2, :] parent_2_dec = pop_dec[n // 2:, :] beta =", "np.array([k for k in range(len(rank)) if rank[k] == fronts[f]]) fmax = pop_obj[front, :].max(0)", "crowd_dis = crowding_distance(self.pop_obj, front_no) evals = evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals,", "- 2. * mu[temp]) * np.power(1. - norm, dis_m + 1.), 1. /", "initial population \"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper - self.lower)) + self.lower", "# structures for holding the domination info required for fast nd sorting dominate", "- 1]], i)] ) / ((fmax[i] - fmin[i]) if fmax[i] != fmin[i] else", "np.inf * np.ones(N) current_rank = 1 # if non dominated is part of", "population :param N: number of elements in the population :return: the initial population", "+ self.lower return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals", "to Tmin cooling_factor = -math.log(t_max / t_min) # Attempt moves to new states", "= np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self, parameter_values): \"\"\" Inputs: - value", "Accept new state and compare to best state prev_state, prev_obj, prev_cstr, prev_data, prev_value", "population constraint violation (Nx1) :returns: ranks: an array with the ranks max_rank: max", "0.5] = np.power(2 * mu[mu > 0.5], -1 / (dis_c + 1)) beta", "in range(len(fronts)) if fronts[i]== max_front] delta_n = np.argsort(-crowd_dis[last])[: (n - len(index))] 
index.extend([last[i] for", "i in range(N): for j in range(i+1,N): #constrained pareto dominance if pop_cstr[i] ==", "self.offspring_obj, self.offspring_cstr, self.offspring_data = problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover ))", "objectives, constraints, data, rank, and cdist) ''' # fast non-dominated sorting and crowding", "range(N): for j in range(i+1,N): #constrained pareto dominance if pop_cstr[i] == pop_cstr[j]: #objective", "17 np.random.seed(seed) sat_conditions = {} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] =", "state, obj, cstr, data, value if value < best_value: best_state, best_obj, best_cstr, best_data,", "ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The crowding distance of the Pareto front", "len(index))] index.extend([last[i] for i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i", "return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps = 10000, t_max = 1500.0, t_min", "2][:] (n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :] parent_2_dec =", "half_tournemant(rank, cdist) mate[1::2] = half_tournemant(rank, cdist) return mate def objective_dominance(pop_obj, i, j): \"\"\"", "{} sat_conditions[\"vov_mpm0\"] = 0.05 sat_conditions[\"vov_mpm1\"] = 0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05", "print(circuit) for iter, stats in sa.minimize(circuit): print(\"\\r iter {}: {}\".format(iter, stats)) print(sa.best_state) print(circuit.simulate(sa.best_state))", "j, i return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the", "obj = np.zeros((n, 1)) cstr = np.zeros(n) # data associated with the solutions", "35e6,'pm' : 45.0, 'fom': 900} gt.update(sat_conditions) circuit = VCAmplifierCircuitOptProblem( ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm'", "return ranks, current_rank-1 
def crowding_distance(pop_obj, rank): \"\"\" The crowding distance of the Pareto", "+ (1. - 2. * mu[temp]) * np.power(1. - norm, dis_m + 1.),", "data = problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in zip(select, data): self.pop_data[i] =", "\"\"\" The problem related parameters and variation operators of cross over and mutation", "measures] - reward +1 improved, -1 worsen, -1000 no sim, 1000 meet specs", "of the populations' objectives :param i, j: the elems being compared :returns: dominator:", "d))) beta[np.random.random((n // 2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n // 2, 1))", "self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals =", "np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :] parent_2_dec = pop_dec[n // 2:, :]", "range(N) if dominated_by_counter[i] == 0] while np.sum(ranks < np.inf) < N/2: ranks[current_front] =", "sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"]", "// 2, d)) < 0.5] = 1 beta[np.tile(np.random.random((n // 2, 1)) > crossover,", ":param i, j: the elems being compared :returns: dominator: the index of the", "+= 1 T = t_max * math.exp(cooling_factor * step / steps) state, obj,", "(offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) *", "current_rank = 1 # if non dominated is part of front 1*/ current_front", "function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj, fronts) #Select elements from", "* (parent_1_dec - parent_2_dec) / 2, (parent_1_dec + parent_2_dec) / 2 - beta", "= state, obj, cstr, data, value # Return best state and energy return", ": j_dominates_i = True if i_dominates_j and (not j_dominates_i): return i, j if", 
"2.5, initial_state=None, convert_multi_obj = default_mo_2_so): ''' Minimizes the energy of a system by", "<NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and", "(state, energy, objectives, constraints, data): the best state and energy found. ''' if", "- Addapted from <NAME>'s code from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import", "// 2, 1)) > crossover, (1, d))] = 1 offspring_dec = np.vstack(((parent_1_dec +", "# Precompute factor for exponential cooling from Tmax to Tmin cooling_factor = -math.log(t_max", "= False) sa = SA() print(circuit) for iter, stats in sa.minimize(circuit): print(\"\\r iter", "parent_2_dec) / 2)) site = np.random.random((n, d)) < mutation mu = np.random.random((n, d))", "= 0 # Precompute factor for exponential cooling from Tmax to Tmin cooling_factor", "initialize the population :param N: number of elements in the population :return: the", "states while step < steps: step += 1 T = t_max * math.exp(cooling_factor", "prev_cstr, prev_data, prev_value = state, obj, cstr, data, value if value < best_value:", "(NxM) :param pop_cstr: population constraint violation (Nx1) :returns: ranks: an array with the", "into individuals :param pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr,", "if i and j are non-dominated dominated: the index of the dominated, None", "(mu > 0.5) norm = (upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp]", "if value < best_value: best_state, best_obj, best_cstr, best_data, best_value, = state, obj, cstr,", "value < best_value: best_state, best_obj, best_cstr, best_data, best_value, = state, obj, cstr, data,", "handle constraint optimization and fast non-dominated sorting SA - Addapted from <NAME>'s code", "np.zeros((n, 1)) return obj, cstr, data def individual(self, pop_vars): \"\"\" turn decision vectors", "- lower[temp]) / 
(upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\", "fmin[i]) if fmax[i] != fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr,", "with the solutions but not used in the optimization # it can be", "done, {} - observations array of concat [ values, measures] - reward +1", "and upper boundary of pop_dec once d != self.d :param pop_dec: decision vectors", "= 20 pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d) = np.shape(pop_dec)", "if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] :", "np.unique(self.pop.round(decimals=9), axis=0, return_index =True) if len(index) < self.pop.shape[0] : select = np.in1d(range(self.pop.shape[0]), index)", "0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05", "\"\"\" Generate offspring individuals :param boundary: lower and upper boundary of pop_dec once", "in range(N) if dominated_by_counter[i] == 0] while np.sum(ranks < np.inf) < N/2: ranks[current_front]", "= True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j and", "sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"]", "self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals,", "index of selected solutions ''' n = len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2]", "parameter_values): \"\"\" Inputs: - value - new device sizes self.values update is self.value", "for SA. Parameters are handled in variation operators as real values. 
Extending classes", "np.inf] for f in range(len(fronts)): front = np.array([k for k in range(len(rank)) if", "np.power(2 * mu[mu > 0.5], -1 / (dis_c + 1)) beta = beta", "of Simulated Annealing and NSGA-II used in the paper. Created on Nov, 2020", "discrete_actions = False) sa = SA() print(circuit) for iter, stats in sa.minimize(circuit): print(\"\\r", "* ((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n // 2, d)) < 0.5]", "pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d) = np.shape(pop_dec) parent_1_dec =", "def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\" Generate offspring individuals :param boundary: lower and", "< np.inf) < N/2: ranks[current_front] = current_rank next_front = [] for index_a in", "= np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist = cdist[index_a[:n//2]] > cdist[index_a[n//2:]]", "beta = np.zeros((n // 2, d)) mu = np.random.random((n // 2, d)) beta[mu", "= np.argsort(pop_obj[front, i]) crowd_dis[front[sorted_index[0]]] = np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1,", "np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def move(self, parameter_values): \"\"\" Inputs: - value -", "best_cstr, best_data = problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr, prev_data = best_state,", "temp = site & (mu <= 0.5) lower, upper = np.tile(self.lower, (n, 1)),", "= np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] - lower[temp]) /", "2020 @author: <NAME> <<EMAIL>> NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint", "= SA() print(circuit) for iter, stats in sa.minimize(circuit): print(\"\\r iter {}: {}\".format(iter, stats))", "state = best_state prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data best_value", "= len(rank) index_a = np.arange(n) np.random.shuffle(index_a) 
eq_rank = rank[index_a[:n//2]] == rank[index_a[n//2:]] larger_cdist =", "cooling_factor = -math.log(t_max / t_min) # Attempt moves to new states while step", "implement objective weigthing. objectives = [\"name\", ... ] parameters = [\"name\", ... ]", "= 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"] =", "= (offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp])", "= np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): '''", "part of front 1*/ current_front = [i for i in range(N) if dominated_by_counter[i]", "x in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for i in range(N): for j", "last. Note that fnd_sort only #sorts half the population. extra elements are only", "if non dominated is part of front 1*/ current_front = [i for i", "problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj, best_cstr, best_data", "sorting method. :param pop_obj: population objectives (NxM) :param pop_cstr: population constraint violation (Nx1)", "+ cstr def simulated_annealing(problem, steps = 10000, t_max = 1500.0, t_min = 2.5,", "mutation for GA, and move for SA. Parameters are handled in variation operators", "(upper[temp] - lower[temp]) * \\ (np.power(2. * mu[temp] + (1. - 2. *", "sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"]", "cost function if needed. cost_fun() is defined for multi-objective multi-constraint optimization. 
Its up", "self.pop_data def default_mo_2_so(objs, cstr) : return sum(objs)/len(objs) + cstr def simulated_annealing(problem, steps =", "= mutation, crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj))", "fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the ranks of the population elements using", "elements using the fast non-dominated sorting method. :param dominate: list of dominated population", "beta * (parent_1_dec - parent_2_dec) / 2)) site = np.random.random((n, d)) < mutation", "prev_state, prev_obj, prev_cstr, prev_data, prev_value = state, obj, cstr, data, value if value", "crowding_distance(pop_obj, rank): \"\"\" The crowding distance of the Pareto front \"front_id\" :param pop_obj:", "sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1 sat_conditions[\"delta_mnm9\"]", "the cost function if needed. cost_fun() is defined for multi-objective multi-constraint optimization. Its", "np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr,", "for i in range(N) if dominated_by_counter[i] == 0] while np.sum(ranks < np.inf) <", "data to store the simulation outputs data = np.zeros((n, 1)) return obj, cstr,", "for GA, and move for SA. 
Parameters are handled in variation operators as", "min def __str__(self): return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the objective", "vectors :param x: the decision vectors :return: the objective, constraints, and additional data", "np.inf for j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]] += \\ ( pop_obj[(front[sorted_index[j", "problem related parameters and variation operators of cross over and mutation for GA,", "10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) // 2) * 2][:] (n, d)", "mutation mu = np.random.random((n, d)) temp = site & (mu <= 0.5) lower,", "dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1 # if( they become non dominated -", "using the fast non-dominated sorting method. :param pop_obj: population objectives (NxM) :param pop_cstr:", "* math.exp(cooling_factor * step / steps) state, obj, cstr, data = problem.individual(problem.move(state)) value", "is defined for multi-objective multi-constraint optimization. 
Its up to the single objective optimizers", "ranks max_rank: max rank \"\"\" N = len(dominate) ranks = np.inf * np.ones(N)", "state and compare to best state prev_state, prev_obj, prev_cstr, prev_data, prev_value = state,", "index of the dominator, None if i and j are non-dominated dominated: the", "True if i_dominates_j and (not j_dominates_i): return i, j if (not i_dominates_j) and", "[[]*N] :param dominated_by_counter: counter of elements dominating (Nx1) :returns: ranks: an array with", "N: number of elements in the population :return: the initial population \"\"\" pop_dec", "refractored out of this function fronts, max_front = fnd_sort(pop_obj, pop_cstr) crowd_dis = crowding_distance(pop_obj,", "self.d)) * (self.upper - self.lower)) + self.lower return pop_dec def variation(self, pop_dec, mutation=0.1,", "self.value + action*(self.ranges[:,1] - self.ranges[:, 0]) - Outouts: observation, reward, done, {} -", "+ action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:,", "= np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values = np.fmin(np.fmax(parameter_values,self.ranges[:, 0]),self.ranges[:,1]) return parameter_values def half_tournemant(rank, cdist):", "for i in range(len(fronts)) if fronts[i] < max_front] last = [i for i", ":returns: ranks: an array with the ranks max_rank: max rank \"\"\" N,M =", "i in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index], fronts[index],", "parent_2_dec) / 2 + beta * (parent_1_dec - parent_2_dec) / 2, (parent_1_dec +", "we will use this # data to store the simulation outputs data =", "while np.sum(ranks < np.inf) < N/2: ranks[current_front] = current_rank next_front = [] for", "sat_conditions[\"vov_mnm10\"] = 0.05 sat_conditions[\"vov_mnm11\"] = 0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 
0.1 sat_conditions[\"delta_mpm2\"]", "is the implentations of Simulated Annealing and NSGA-II used in the paper. Created", "cdist[index_a[n//2:]] decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank,", "best_data = problem.initialize(1) else: best_state, best_obj, best_cstr, best_data = problem.individual(initial_state) state = best_state", "crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj = np.vstack((self.pop_obj, self.offspring_obj)) self.pop_cstr = np.concatenate((self.pop_cstr,", "from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and fast non-dominated sorting SA -", "#objective pareto dominance dominator, dominated = objective_dominance(pop_obj, i, j) if dominator is not", "pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j and (not j_dominates_i): return", ":param population: current population :param n: number of selected individuals :return: next generation", "= len(dominate) ranks = np.inf * np.ones(N) current_rank = 1 # if non", "pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i", "next_front.append(index_b) current_front = next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The", "pop_cstr, pop_data, n): ''' Environmental selection in NSGA-II :param population: current population :param", "= np.inf crowd_dis[front[sorted_index[-1]]] = np.inf for j in range(1, len(front) - 1): crowd_dis[front[sorted_index[j]]]", "individuals :param boundary: lower and upper boundary of pop_dec once d != self.d", "2) * 2][:] (n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2, :]", "sat_conditions[\"vov_mnm5\"] = 0.05 sat_conditions[\"vov_mnm6\"] = 0.05 sat_conditions[\"vov_mnm7\"] = 0.05 sat_conditions[\"vov_mnm8\"] = 0.05 
sat_conditions[\"vov_mnm9\"]", "1. / (dis_m + 1.))) offspring_dec = np.maximum(np.minimum(offspring_dec, upper), lower) return offspring_dec def", "def half_tournemant(rank, cdist): n = len(rank) index_a = np.arange(n) np.random.shuffle(index_a) eq_rank = rank[index_a[:n//2]]", "i_dominates_j = True elif pop_obj[i,obj_idx] > pop_obj[j,obj_idx] : j_dominates_i = True if i_dominates_j", "math.exp(-dV / T) < np.random.random(): # Restore previous state state, obj, cstr, data,", "dominated_by_counter[dominated]+=1 elif pop_cstr[i] < pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: #", "with the ranks max_rank: max rank \"\"\" N,M = pop_obj.shape # structures for", "d)) mu = np.random.random((n // 2, d)) beta[mu <= 0.5] = np.power(2 *", "j are non-dominated \"\"\" _,M = pop_obj.shape i_dominates_j = False j_dominates_i = False", "< np.random.random(): # Restore previous state state, obj, cstr, data, value = prev_state,", "# data associated with the solutions but not used in the optimization #", "cost_fun() is defined for multi-objective multi-constraint optimization. Its up to the single objective", "Simulated Annealing and NSGA-II used in the paper. Created on Nov, 2020 @author:", "become non dominated - then they are part of next front) if dominated_by_counter[index_b]", "and additional data vectors \"\"\" n = x.shape[0] obj = np.zeros((n, 1)) cstr", "SA() print(circuit) for iter, stats in sa.minimize(circuit): print(\"\\r iter {}: {}\".format(iter, stats)) print(sa.best_state)", "minimum temperature greater than zero.') # Note initial state if initial_state is None", "] min = [min, ... ] max = [max, ... ] \"\"\" def", "0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1", "up to the single objective optimizers to implement objective weigthing. 
objectives = [\"name\",", "crowding distance of the Pareto front \"front_id\" :param pop_obj: objective vectors :param rank:", "1.), 1. / (dis_m + 1)) - 1.) temp = site & (mu", "fast non-dominated sorting and crowding distance # arguably they could be refractored out", "= np.power(2 * mu[mu <= 0.5], 1 / (dis_c + 1)) beta[mu >", "[] for index_a in current_front: # reduce the numbers of domination to the", "domination to the ones in its set of dominance for index_b in dominate[index_a]:", "offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (1. - np.power( 2. * (1.", "problem.individual(problem.initialize(self.pop.shape[0] - len(index))) for i, v in zip(select, data): self.pop_data[i] = v return", "- parent_2_dec) / 2, (parent_1_dec + parent_2_dec) / 2 - beta * (parent_1_dec", "populations' objectives :param i, j: the elems being compared :returns: dominator: the index", "= next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The crowding distance", "environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental selection in NSGA-II :param population: current", "best_state, best_value if __name__ == '__main__': seed = 17 np.random.seed(seed) sat_conditions = {}", "dominating (Nx1) :returns: ranks: an array with the ranks max_rank: max rank \"\"\"", "self.pop_cstr = np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no,", "next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front current_rank+=1 return ranks,", "np.shape(pop_obj) crowd_dis = np.zeros(n) fronts = np.unique(rank) fronts = fronts[fronts != np.inf] for", "Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and fast non-dominated sorting SA", "= max self.lower = min def __str__(self): return \"Target: {}\".format(self.target) def 
cost_fun(self, x):", "self.ranges[:, 0]) - Outouts: observation, reward, done, {} - observations array of concat", "obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j = True elif pop_obj[i,obj_idx]", "selected solutions ''' n = len(rank) mate = np.zeros(n, dtype=np.int16) mate[::2] = half_tournemant(rank,", "solutions but not used in the optimization # it can be usefull to", "= True if i_dominates_j and (not j_dominates_i): return i, j if (not i_dominates_j)", "pop_size) evals = evals - pop_size yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no", "1)) norm = (offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp]", "sorting dominate = [[] for x in range(N)] dominated_by_counter = np.zeros(N, dtype=int) for", "= [i for i in range(len(fronts)) if fronts[i] < max_front] last = [i", "current_front = next_front current_rank+=1 return ranks, current_rank-1 def crowding_distance(pop_obj, rank): \"\"\" The crowding", "the ranks and return return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the", "offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (np.power(2. * mu[temp] + (1. -", ":returns: ranks: an array with the ranks max_rank: max rank \"\"\" N =", "objectives, constraints, data): the best state and energy found. ''' if t_min <=", "problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover )) self.pop = np.vstack((self.pop, self.offspring_dec)) self.pop_obj", "np.sum(ranks < np.inf) < N/2: ranks[current_front] = current_rank next_front = [] for index_a", "x.shape[0] obj = np.zeros((n, 1)) cstr = np.zeros(n) # data associated with the", "to handle constraint optimization and fast non-dominated sorting SA - Addapted from <NAME>'s", "in the cost function if needed. 
cost_fun() is defined for multi-objective multi-constraint optimization.", "# i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the ranks and return return assign_rank(dominate,", "max): self.d = d self.upper = max self.lower = min def __str__(self): return", "norm = (upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] -", "the initial population \"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper - self.lower)) +", "vectors \"\"\" n = x.shape[0] obj = np.zeros((n, 1)) cstr = np.zeros(n) #", "decision_a = np.logical_or(np.logical_and(eq_rank, larger_cdist), rank[index_a[:n//2]] < rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist):", "cstr) dV = 100*(value - prev_value) if dV > 0.0 and math.exp(-dV /", "// 2) * 2][:] (n, d) = np.shape(pop_dec) parent_1_dec = pop_dec[:n // 2,", "= 1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta * (parent_1_dec", "= problem.individual( problem.variation(self.pop[mating_pool, :], mutation = mutation, crossover= crossover )) self.pop = np.vstack((self.pop,", "be compared :param N: number of solutions to be selected :param fit: fitness", "0.05 sat_conditions[\"delta_mpm0\"] = 0.1 sat_conditions[\"delta_mpm1\"] = 0.1 sat_conditions[\"delta_mpm2\"] = 0.1 sat_conditions[\"delta_mpm3\"] = 0.1", "dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j) dominated_by_counter[j]+=1 #assign the", ":return: next generation population ( decison vars, objectives, constraints, data, rank, and cdist)", "\"\"\" _,M = pop_obj.shape i_dominates_j = False j_dominates_i = False for obj_idx in", "np.random.random((n, d)) temp = site & (mu <= 0.5) lower, upper = np.tile(self.lower,", "parameter_values + action*(self.ranges[:,1] - self.ranges[:, 0]) parameter_values = np.round(parameter_values / self.ranges[:,2])*self.ranges[:,2] parameter_values =", 
"range(len(fronts)) if fronts[i] < max_front] last = [i for i in range(len(fronts)) if", "(mu[temp] - 0.5) * np.power(1. - norm, dis_m + 1.), 1. / (dis_m", "Computes and sets the ranks of the population elements using the fast non-dominated", "= 0.1 sat_conditions[\"delta_mpm3\"] = 0.1 sat_conditions[\"delta_mnm4\"] = 0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] =", "rank[index_a[n//2:]]) return index_a[np.r_[decision_a, ~decision_a]] def tournament(rank, cdist): ''' tournament selection :param K: number", "j_dominates_i = False for obj_idx in range(M): if pop_obj[i,obj_idx] < pop_obj[j,obj_idx] : i_dominates_j", "+ beta * (parent_1_dec - parent_2_dec) / 2, (parent_1_dec + parent_2_dec) / 2", "the index of the dominator, None if i and j are non-dominated dominated:", "np.zeros((n // 2, d)) mu = np.random.random((n // 2, d)) beta[mu <= 0.5]", "in delta_n]) return pop_dec[index,:], pop_obj[index,:], pop_cstr[index], [pop_data[i] for i in index], fronts[index], crowd_dis[index],index", "<filename>src/optimizers.py ''' This is the implentations of Simulated Annealing and NSGA-II used in", "0.1 sat_conditions[\"delta_mnm5\"] = 0.1 sat_conditions[\"delta_mnm6\"] = 0.1 sat_conditions[\"delta_mnm7\"] = 0.1 sat_conditions[\"delta_mnm8\"] = 0.1", "ranks: an array with the ranks max_rank: max rank \"\"\" N,M = pop_obj.shape", "best_obj, best_cstr, best_data best_value = prev_value = convert_multi_obj(best_obj, best_cstr) step = 0 #", "ng.Specifications(objective=[('idd', 1)], lt={'idd': 35e-5,'pm' : 90.0},gt=gt), discrete_actions = False) sa = SA() print(circuit)", "pop_cstr): \"\"\" Computes and sets the ranks of the population elements using the", "pop_cstr[j]: # j dominates i dominate[j].append(i) dominated_by_counter[i]+=1 else: # i dominates j dominate[i].append(j)", ":param pop_vars: decision vectors :return: (pop_vars, pop_obj, pop_cstr) \"\"\" pop_obj, pop_cstr, pop_data =", "lower[temp]) * \\ (np.power(2. * mu[temp] + (1. - 2. 
* mu[temp]) *", "the dominated, None if i and j are non-dominated \"\"\" _,M = pop_obj.shape", "evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals > 0: mating_pool", "def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data, n): ''' Environmental selection in NSGA-II :param population:", "1 offspring_dec = np.vstack(((parent_1_dec + parent_2_dec) / 2 + beta * (parent_1_dec -", "offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] += (upper[temp] - lower[temp]) * \\ (1.", "beta * ((-1)** np.random.randint(2, size=(n // 2, d))) beta[np.random.random((n // 2, d)) <", "ranks and return return assign_rank(dominate, dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks", "and energy found. ''' if t_min <= 0.0: raise ValueError('Exponential cooling requires a", "max = [max, ... ] \"\"\" def __init__(self, d, min, max): self.d =", "index of the dominated, None if i and j are non-dominated \"\"\" _,M", "1 # if non dominated is part of front 1*/ current_front = [i", "<= 0.0: raise ValueError('Exponential cooling requires a minimum temperature greater than zero.') #", "np.ones(N) current_rank = 1 # if non dominated is part of front 1*/", "(self.upper - self.lower)) + self.lower return pop_dec def variation(self, pop_dec, mutation=0.1, crossover=0.6): \"\"\"", "zip(select, data): self.pop_data[i] = v return self.pop, self.pop_obj, self.pop_cstr, self.pop_data def default_mo_2_so(objs, cstr)", "0.1 sat_conditions[\"delta_mnm9\"] = 0.1 sat_conditions[\"delta_mnm10\"] = 0.1 sat_conditions[\"delta_mnm11\"] = 0.1 gt={'gdc': 50,'gbw': 35e6,'pm'", "energy return best_state, best_value if __name__ == '__main__': seed = 17 np.random.seed(seed) sat_conditions", "to implement objective weigthing. objectives = [\"name\", ... 
] parameters = [\"name\", ...", "Attempt moves to new states while step < steps: step += 1 T", "import Sequence class Problem(object): \"\"\" The problem related parameters and variation operators of", "while evals > 0: mating_pool = tournament(front_no, crowd_dis) self.offspring_dec, self.offspring_obj, self.offspring_cstr, self.offspring_data =", "obj, cstr, data, value if value < best_value: best_state, best_obj, best_cstr, best_data, best_value,", "state and energy return best_state, best_value if __name__ == '__main__': seed = 17", "holding the domination info required for fast nd sorting dominate = [[] for", "Sequence class Problem(object): \"\"\" The problem related parameters and variation operators of cross", "beta * (parent_1_dec - parent_2_dec) / 2, (parent_1_dec + parent_2_dec) / 2 -", "the population :return: the initial population \"\"\" pop_dec = (np.random.random((N, self.d)) * (self.upper", "calculate the objective and constraints vectors :param x: the decision vectors :return: the", "ranks max_rank: max rank \"\"\" N,M = pop_obj.shape # structures for holding the", "(n, 1)) norm = (offspring_dec[temp] - lower[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] +=", "numbers of domination to the ones in its set of dominance for index_b", "from https://github.com/perrygeo/simanneal/blob/master/simanneal/anneal.py ''' import numpy as np import math from itertools import repeat", "def move(self, parameter_values): \"\"\" Inputs: - value - new device sizes self.values update", "analog IC optimization we will use this # data to store the simulation", "state, obj, cstr, data, value # Return best state and energy return best_state,", "// 2:, :] beta = np.zeros((n // 2, d)) mu = np.random.random((n //", "self.pop_data, front_no, crowd_dis,_ = environment_selection(self.pop, self.pop_obj, self.pop_cstr,self.pop_data, pop_size) evals = evals - pop_size", "- np.power( 2. * (1. - mu[temp]) + 2. 
* (mu[temp] - 0.5)", "of the dominator, None if i and j are non-dominated dominated: the index", "for i in index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem, pop_size=100, evaluations=100", "non-dominated dominated: the index of the dominated, None if i and j are", "= min def __str__(self): return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the", "return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the objective and constraints vectors", "* mu[mu <= 0.5], 1 / (dis_c + 1)) beta[mu > 0.5] =", "tournament(rank, cdist): ''' tournament selection :param K: number of solutions to be compared", "i return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes and sets the ranks", "Note that fnd_sort only #sorts half the population. extra elements are only from", "lower, upper = np.tile(self.lower, (n, 1)), np.tile(self.upper, (n, 1)) norm = (offspring_dec[temp] -", "lower[temp]) * \\ (1. - np.power( 2. * (1. - mu[temp]) + 2.", "part of next front) if dominated_by_counter[index_b] == 0: next_front.append(index_b) current_front = next_front current_rank+=1", "= np.concatenate((self.pop_cstr, self.offspring_cstr)) self.pop_data = self.pop_data + self.offspring_data self.pop, self.pop_obj,self.pop_cstr, self.pop_data, front_no, crowd_dis,_", "__str__(self): return \"Target: {}\".format(self.target) def cost_fun(self, x): \"\"\" calculate the objective and constraints", "> 0.5) norm = (upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp]) offspring_dec[temp] +=", "np.power( 2. * (1. - mu[temp]) + 2. * (mu[temp] - 0.5) *", "elements [[]*N] :param dominated_by_counter: counter of elements dominating (Nx1) :returns: ranks: an array", "to the ones in its set of dominance for index_b in dominate[index_a]: dominated_by_counter[index_b]-=1", "[\"name\", ... ] parameters = [\"name\", ... ] min = [min, ... 
]", "pop_dec: decision vectors :return: \"\"\" dis_c = 10 dis_m = 20 pop_dec =", "vectors :return: \"\"\" dis_c = 10 dis_m = 20 pop_dec = pop_dec[:(len(pop_dec) //", "= evaluations yield self.pop, self.pop_obj, self.pop_cstr, self.pop_data, evals, front_no while evals > 0:", "d)) < mutation mu = np.random.random((n, d)) temp = site & (mu <=", "NSGA - Adapted from https://github.com/ChengHust/NSGA-II Updated to handle constraint optimization and fast non-dominated", "rank: front numbers :return: crowding distance \"\"\" n, M = np.shape(pop_obj) crowd_dis =", "energy, objectives, constraints, data): the best state and energy found. ''' if t_min", "value - new device sizes self.values update is self.value += self.value + action*(self.ranges[:,1]", "0.05 sat_conditions[\"vov_mpm2\"] = 0.05 sat_conditions[\"vov_mpm3\"] = 0.05 sat_conditions[\"vov_mnm4\"] = 0.05 sat_conditions[\"vov_mnm5\"] = 0.05", "Outouts: observation, reward, done, {} - observations array of concat [ values, measures]", "= [\"name\", ... ] parameters = [\"name\", ... 
] min = [min, ...", "prev_value = convert_multi_obj(best_obj, best_cstr) step = 0 # Precompute factor for exponential cooling", "mu[mu <= 0.5], 1 / (dis_c + 1)) beta[mu > 0.5] = np.power(2", "pop_cstr[index], [pop_data[i] for i in index], fronts[index], crowd_dis[index],index class NSGA2: def minimize(self, problem,", ": best_state, best_obj, best_cstr, best_data = problem.initialize(1) else: best_state, best_obj, best_cstr, best_data =", "selected individuals :return: next generation population ( decison vars, objectives, constraints, data, rank,", "dominated_by_counter) def assign_rank(dominate, dominated_by_counter): \"\"\" sets the ranks of the population elements using", "if fmax[i] != fmin[i] else 1.0) return crowd_dis def environment_selection(pop_dec, pop_obj, pop_cstr, pop_data,", "0.5] = np.power(2 * mu[mu <= 0.5], 1 / (dis_c + 1)) beta[mu", "* 500, mutation=0.2, crossover=0.8, initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop is None:", "SA. Parameters are handled in variation operators as real values. 
Extending classes should", "best_data = problem.individual(initial_state) state = best_state prev_state, prev_obj, prev_cstr, prev_data = best_state, best_obj,", "NSGA-II :param population: current population :param n: number of selected individuals :return: next", "and j_dominates_i: return j, i return None, None def fnd_sort(pop_obj, pop_cstr): \"\"\" Computes", "sim, 1000 meet specs \"\"\" action = np.random.normal(scale=0.1, size=len(parameter_values)) parameter_values = parameter_values +", "initial_pop=None): \"\"\" NSGA-II algorithm \"\"\" if initial_pop is None: self.pop, self.pop_obj, self.pop_cstr, self.pop_data", "Restore previous state state, obj, cstr, data, value = prev_state, prev_obj, prev_cstr, prev_data,", "of elements in the population :return: the initial population \"\"\" pop_dec = (np.random.random((N,", "& (mu > 0.5) norm = (upper[temp] - offspring_dec[temp]) / (upper[temp] - lower[temp])", "# reduce the numbers of domination to the ones in its set of", "fast non-dominated sorting method. :param pop_obj: population objectives (NxM) :param pop_cstr: population constraint", "1.) temp = site & (mu > 0.5) norm = (upper[temp] - offspring_dec[temp])" ]
[ "generateFermi(self): for i in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def", "= statesIn self.nParticles = particlesIn self.below_fermi = [] self.above_fermi = [] self.states =", "self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi = [] self.above_fermi = [] self.states", "__init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi = [] self.above_fermi", "statesIn, particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi = [] self.above_fermi =", "PairingBasisGen: def generateFermi(self): for i in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates):", "range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in", "range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn", "for i in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self):", "def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates", "j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1))", "= particlesIn self.below_fermi = [] self.above_fermi = [] self.states = [] self.generateFermi() self.generateStates()", "self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi =", "generateStates(self): 
for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates =", "in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def", "self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn,", "def generateFermi(self): for i in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j)", "i in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for", "for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn", "particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi = [] self.above_fermi = []", "self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2):", "self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn):", "self.nParticles = particlesIn self.below_fermi = [] self.above_fermi = [] self.states = [] self.generateFermi()", "range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self,", "in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def 
generateStates(self): for sp", "def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi = []", "statesIn self.nParticles = particlesIn self.below_fermi = [] self.above_fermi = [] self.states = []", "self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles = particlesIn self.below_fermi", "class PairingBasisGen: def generateFermi(self): for i in range(0,self.nParticles): self.below_fermi.append(i) for j in range(self.nParticles,", "for j in range(self.nParticles, self.nSpStates): self.above_fermi.append(j) def generateStates(self): for sp in range(0,self.nSpStates/2): self.states.append((sp+1,1))", "in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles =", "sp in range(0,self.nSpStates/2): self.states.append((sp+1,1)) self.states.append((sp+1,-1)) def __init__(self, statesIn, particlesIn): self.nSpStates = statesIn self.nParticles" ]
[ "100: speed = 100 if speed < -72: speed = -72 kit.servo[esc_pin].angle =", "throttle_full_reverse = 0 steering_pin = 15 esc_pin = 14 frontdiff_pin = 11 reardiff_pin", "if output == None: return default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1))", "rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received is None: print(\"no battery info\") break", "struct import numpy as np from adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379,", "def steering_angle(angle): if angle > 55: angle = 55 if angle < -55:", "< 0.05 and time.time() - in_motion_start > 2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\")", "np from adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16)", "driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1))", "output == None: return default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked", "-72 kit.servo[esc_pin].angle = speed * speed_cap / 100 + 72 driving = True", "if target_speed is None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start = time.time() else:", "0.05 and time.time() - in_motion_start > 2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else:", "None: current_speed = float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no driving input received\")", "#steering angle 30 - 150 throttle_stop = 72 throttle_full_forward = 180 throttle_full_reverse =", "= time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear", "angle > 55: angle = 55 if angle < -55: angle = -55", "30 - 150 throttle_stop = 72 
throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin", "rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received", "speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle') if angle_received is", "14 frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos = [0,", "of max speed #steering angle 30 - 150 throttle_stop = 72 throttle_full_forward =", "faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start =", "#percentage of max speed #steering angle 30 - 150 throttle_stop = 72 throttle_full_forward", "= ServoKit(channels=16) #controllable variables def rget_and_float(name, default = None): output = r.get(name) if", "= [78, 15] #0 locked, 1 open front_diff_servo_pos = [120, 55] #0 locked,", "front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle =", "steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? 
print(\"stopping\") driving_speed_signal(0) steering_angle(-20) time.sleep(1)", "import math as m import redis import struct import numpy as np from", "None): output = r.get(name) if output == None: return default else: return float(output)", "#0 locked, 1 open front_diff_servo_pos = [120, 55] #0 locked, 1 open def", "r.get('voltages') if voltages_received is None: print(\"no battery info\") break else: voltages = np.array(struct.unpack('%sf'", "ServoKit(channels=16) #controllable variables def rget_and_float(name, default = None): output = r.get(name) if output", "front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #----", "< -55: angle = -55 kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed): if", "speed #steering angle 30 - 150 throttle_stop = 72 throttle_full_forward = 180 throttle_full_reverse", "time import math as m import redis import struct import numpy as np", "= front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received is None:", "driving input received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed = float(target_speed) if target_speed", "> 0: if current_speed < 0.05 and time.time() - in_motion_start > 2: driving_speed_signal(target_speed", "speed < -72: speed = -72 kit.servo[esc_pin].angle = speed * speed_cap / 100", "* 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else: driving_speed_signal(0)", "#print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle') if angle_received is None: #print(\"no steering", "as np from adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit =", "None: print(\"no battery info\") break else: voltages = np.array(struct.unpack('%sf' %2, 
voltages_received)) if voltages.min()", "angle < -55: angle = -55 kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed):", "battery info\") break else: voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage:", "db=0) kit = ServoKit(channels=16) #controllable variables def rget_and_float(name, default = None): output =", "voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received", "is None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed =", "#print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ???", "- 150 throttle_stop = 72 throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin =", "= 100 if speed < -72: speed = -72 kit.servo[esc_pin].angle = speed *", "else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear =", "= 72 throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin = 15 esc_pin =", "#---- speed_cap = 45 #percentage of max speed #steering angle 30 - 150", "110] rear_diff_servo_pos = [78, 15] #0 locked, 1 open front_diff_servo_pos = [120, 55]", "angle = -55 kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed): if speed >", "is None: print(\"no battery info\") break else: voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if", "-55: angle = -55 kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed): if speed", "low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage of max speed #steering", "r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? 
print(\"stopping\") driving_speed_signal(0) steering_angle(-20) time.sleep(1) steering_angle(20) time.sleep(1) steering_angle(-20)", "100 if speed < -72: speed = -72 kit.servo[esc_pin].angle = speed * speed_cap", "target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is not None: current_speed =", "None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed = float(target_speed)", "current_speed_received is not None: current_speed = float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no", "0 steering_pin = 15 esc_pin = 14 frontdiff_pin = 11 reardiff_pin = 13", "1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5)", "= 12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos = [78, 15] #0 locked,", "print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is not None:", "88 def driving_speed_signal(speed): if speed > 100: speed = 100 if speed <", "target_speed = float(target_speed) if target_speed > 0: if current_speed < 0.05 and time.time()", "redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable variables def rget_and_float(name, default = None):", "import time import math as m import redis import struct import numpy as", "output = r.get(name) if output == None: return default else: return float(output) rear_diff_locked", "1 open def steering_angle(angle): if angle > 55: angle = 55 if angle", "= True in_motion_start = time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked =", "= r.get(name) if output == None: return default else: return float(output) rear_diff_locked =", "angle 30 - 150 throttle_stop = 72 throttle_full_forward = 180 
throttle_full_reverse = 0", "72 throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin = 15 esc_pin = 14", "locked, 1 open front_diff_servo_pos = [120, 55] #0 locked, 1 open def steering_angle(angle):", "voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed =", "r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is not None: current_speed = float(current_speed_received) #print(current_speed)", "= time.time() else: target_speed = float(target_speed) if target_speed > 0: if current_speed <", "return default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1))", "> 55: angle = 55 if angle < -55: angle = -55 kit.servo[steering_pin].angle", "break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is not None: current_speed", "rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle", "target_speed is None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed", "int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage',", "12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos = [78, 15] #0 locked, 1", "* 1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received =", "if current_speed_received is not None: current_speed = float(current_speed_received) #print(current_speed) if target_speed is None:", "- in_motion_start > 2: driving_speed_signal(target_speed * 1.5) 
#print(\"driving faster\") else: driving_speed_signal(target_speed * 1)", "gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received =", "#print(\"no driving input received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed = float(target_speed) if", "time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear =", "is not None: current_speed = float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no driving", "from adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable", "open front_diff_servo_pos = [120, 55] #0 locked, 1 open def steering_angle(angle): if angle", "= int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage", "#print(\"driving faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start", "if angle_received is None: #print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000,", "input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? 
print(\"stopping\") driving_speed_signal(0)", "= 0 steering_pin = 15 esc_pin = 14 frontdiff_pin = 11 reardiff_pin =", "speed > 100: speed = 100 if speed < -72: speed = -72", "#print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle') if", "* speed_cap / 100 + 72 driving = True in_motion_start = time.time() while", "import struct import numpy as np from adafruit_servokit import ServoKit r = redis.Redis(host='localhost',", "[78, 15] #0 locked, 1 open front_diff_servo_pos = [120, 55] #0 locked, 1", "else: target_speed = float(target_speed) if target_speed > 0: if current_speed < 0.05 and", "gear_servo_pos = [0, 60, 110] rear_diff_servo_pos = [78, 15] #0 locked, 1 open", "= int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked]", "= gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received", "driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else:", "1000, \"on\") time.sleep(0.03) # ??? 
print(\"stopping\") driving_speed_signal(0) steering_angle(-20) time.sleep(1) steering_angle(20) time.sleep(1) steering_angle(-20) time.sleep(1)", "import redis import struct import numpy as np from adafruit_servokit import ServoKit r", "if angle < -55: angle = -55 kit.servo[steering_pin].angle = -angle + 88 def", "= 14 frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos =", "int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear]", "1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\")", "= speed * speed_cap / 100 + 72 driving = True in_motion_start =", "1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45", "= [0, 60, 110] rear_diff_servo_pos = [78, 15] #0 locked, 1 open front_diff_servo_pos", "int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap =", "speed = -72 kit.servo[esc_pin].angle = speed * speed_cap / 100 + 72 driving", "< -72: speed = -72 kit.servo[esc_pin].angle = speed * speed_cap / 100 +", "= rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received is None: print(\"no battery info\")", "if current_speed < 0.05 and time.time() - in_motion_start > 2: driving_speed_signal(target_speed * 1.5)", "r.get('angle') if angle_received is None: #print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running',", "speed = 100 if speed < -72: speed = -72 kit.servo[esc_pin].angle = speed", "2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed * 1) 
#print(\"driving normal speed\")", "\"on\") time.sleep(0.03) # ??? print(\"stopping\") driving_speed_signal(0) steering_angle(-20) time.sleep(1) steering_angle(20) time.sleep(1) steering_angle(-20) time.sleep(1) steering_angle(0)", "11 reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos", "time.time() - in_motion_start > 2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed *", "== None: return default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked =", "None: #print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) #", "rget_and_float(name, default = None): output = r.get(name) if output == None: return default", "True in_motion_start = time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked',", "rear_diff_servo_pos = [78, 15] #0 locked, 1 open front_diff_servo_pos = [120, 55] #0", "if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if", "kit = ServoKit(channels=16) #controllable variables def rget_and_float(name, default = None): output = r.get(name)", "low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is not", "= -55 kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed): if speed > 100:", "voltages_received = r.get('voltages') if voltages_received is None: print(\"no battery info\") break else: voltages", "import ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable variables def", "#0 locked, 1 open def steering_angle(angle): if angle > 55: 
angle = 55", "else: voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed", "current_speed = float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no driving input received\") driving_speed_signal(0)", "if speed < -72: speed = -72 kit.servo[esc_pin].angle = speed * speed_cap /", "steering_pin = 15 esc_pin = 14 frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin", "1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage',", "driving_speed_signal(speed): if speed > 100: speed = 100 if speed < -72: speed", "r.get('current_speed') if current_speed_received is not None: current_speed = float(current_speed_received) #print(current_speed) if target_speed is", "else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle') if angle_received is None:", "driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle') if angle_received is None: #print(\"no", "time.time() angle_received = r.get('angle') if angle_received is None: #print(\"no steering input received\") steering_angle(0)", "received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? 
print(\"stopping\") driving_speed_signal(0) steering_angle(-20)", "float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1))", "variables def rget_and_float(name, default = None): output = r.get(name) if output == None:", "= 45 #percentage of max speed #steering angle 30 - 150 throttle_stop =", "15 esc_pin = 14 frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin = 12", "port=6379, db=0) kit = ServoKit(channels=16) #controllable variables def rget_and_float(name, default = None): output", "1 open front_diff_servo_pos = [120, 55] #0 locked, 1 open def steering_angle(angle): if", "[120, 55] #0 locked, 1 open def steering_angle(angle): if angle > 55: angle", "1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle", "/ 100 + 72 driving = True in_motion_start = time.time() while driving: rear_diff_locked", "= -72 kit.servo[esc_pin].angle = speed * speed_cap / 100 + 72 driving =", "reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos =", "72 driving = True in_motion_start = time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1))", "= np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed')", "time.time() else: target_speed = float(target_speed) if target_speed > 0: if current_speed < 0.05", "rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage of max speed #steering angle 30", "import numpy as np from adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379, db=0)", "def rget_and_float(name, default = None): output = r.get(name) if output == None: return", "55] #0 locked, 1 open def steering_angle(angle): if angle > 55: 
angle =", "kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5)", "in_motion_start > 2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving", "throttle_stop = 72 throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin = 15 esc_pin", "float(target_speed) if target_speed > 0: if current_speed < 0.05 and time.time() - in_motion_start", "45 #percentage of max speed #steering angle 30 - 150 throttle_stop = 72", "locked, 1 open def steering_angle(angle): if angle > 55: angle = 55 if", "= 13 gearbox_pin = 12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos = [78,", "current_speed < 0.05 and time.time() - in_motion_start > 2: driving_speed_signal(target_speed * 1.5) #print(\"driving", "-72: speed = -72 kit.servo[esc_pin].angle = speed * speed_cap / 100 + 72", "< low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is", "#print(current_speed) if target_speed is None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start = time.time()", "kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed): if speed > 100: speed =", "max speed #steering angle 30 - 150 throttle_stop = 72 throttle_full_forward = 180", "1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle =", "rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage", "open def steering_angle(angle): if angle > 55: angle 
= 55 if angle <", "int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage =", "adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable variables", "1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage of max speed", "default = None): output = r.get(name) if output == None: return default else:", "60, 110] rear_diff_servo_pos = [78, 15] #0 locked, 1 open front_diff_servo_pos = [120,", "current_speed_received = r.get('current_speed') if current_speed_received is not None: current_speed = float(current_speed_received) #print(current_speed) if", "return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear',", "if target_speed > 0: if current_speed < 0.05 and time.time() - in_motion_start >", "[0, 60, 110] rear_diff_servo_pos = [78, 15] #0 locked, 1 open front_diff_servo_pos =", "gearbox_pin = 12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos = [78, 15] #0", "np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received", "15] #0 locked, 1 open front_diff_servo_pos = [120, 55] #0 locked, 1 open", "= int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage =", "= int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap", "input received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed = 
float(target_speed) if target_speed >", "-55 kit.servo[steering_pin].angle = -angle + 88 def driving_speed_signal(speed): if speed > 100: speed", "+ 88 def driving_speed_signal(speed): if speed > 100: speed = 100 if speed", "= r.get('angle') if angle_received is None: #print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received))", "int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage of max", "m import redis import struct import numpy as np from adafruit_servokit import ServoKit", "= None): output = r.get(name) if output == None: return default else: return", "if angle > 55: angle = 55 if angle < -55: angle =", "= 15 esc_pin = 14 frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin =", "= float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start", "else: driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time()", "angle_received = r.get('angle') if angle_received is None: #print(\"no steering input received\") steering_angle(0) else:", "esc_pin = 14 frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos", "kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages')", "target_speed > 0: if current_speed < 0.05 and time.time() - in_motion_start > 2:", "+ 72 driving = True in_motion_start = time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked',", "None: return default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked',", "low_battery_voltage = 
rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received is None: print(\"no battery", "55: angle = 55 if angle < -55: angle = -55 kit.servo[steering_pin].angle =", "voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received = r.get('current_speed')", "in_motion_start = time.time() else: target_speed = float(target_speed) if target_speed > 0: if current_speed", "13 gearbox_pin = 12 gear_servo_pos = [0, 60, 110] rear_diff_servo_pos = [78, 15]", "redis import struct import numpy as np from adafruit_servokit import ServoKit r =", "3.5) #---- speed_cap = 45 #percentage of max speed #steering angle 30 -", "= 11 reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos = [0, 60, 110]", "gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked]", "180 throttle_full_reverse = 0 steering_pin = 15 esc_pin = 14 frontdiff_pin = 11", "= r.get('voltages') if voltages_received is None: print(\"no battery info\") break else: voltages =", "float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no driving input received\") driving_speed_signal(0) in_motion_start =", "= [120, 55] #0 locked, 1 open def steering_angle(angle): if angle > 55:", "driving_speed_signal(target_speed * 1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received", "throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin = 15 esc_pin = 14 frontdiff_pin", "angle_received is None: #print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\")", "= -angle + 88 def driving_speed_signal(speed): if speed > 100: speed = 100", 
"def driving_speed_signal(speed): if speed > 100: speed = 100 if speed < -72:", "driving = True in_motion_start = time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked", "ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable variables def rget_and_float(name,", "> 100: speed = 100 if speed < -72: speed = -72 kit.servo[esc_pin].angle", "= r.get('current_speed') if current_speed_received is not None: current_speed = float(current_speed_received) #print(current_speed) if target_speed", "%2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break target_speed = r.get('target_speed') current_speed_received =", "speed_cap = 45 #percentage of max speed #steering angle 30 - 150 throttle_stop", "is None: #print(\"no steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03)", "= int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle =", "= float(target_speed) if target_speed > 0: if current_speed < 0.05 and time.time() -", "1) #print(\"driving normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle')", "= r.get('target_speed') current_speed_received = r.get('current_speed') if current_speed_received is not None: current_speed = float(current_speed_received)", "= rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage of max speed #steering angle", "voltages_received is None: print(\"no battery info\") break else: voltages = np.array(struct.unpack('%sf' %2, voltages_received))", "driving_speed_signal(0) in_motion_start = time.time() else: target_speed = float(target_speed) if target_speed > 0: if", "= int(rget_and_float('gear', 1)) 
low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage of", "front_diff_servo_pos = [120, 55] #0 locked, 1 open def steering_angle(angle): if angle >", "in_motion_start = time.time() while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1))", "0: if current_speed < 0.05 and time.time() - in_motion_start > 2: driving_speed_signal(target_speed *", "= rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if", "speed_cap / 100 + 72 driving = True in_motion_start = time.time() while driving:", "int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear', 1)) kit.servo[gearbox_pin].angle = gear_servo_pos[gear] kit.servo[reardiff_pin].angle = rear_diff_servo_pos[rear_diff_locked] kit.servo[frontdiff_pin].angle", "while driving: rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear = int(rget_and_float('gear',", "normal speed\") else: driving_speed_signal(0) #print(\"stopped\") in_motion_start = time.time() angle_received = r.get('angle') if angle_received", "in_motion_start = time.time() angle_received = r.get('angle') if angle_received is None: #print(\"no steering input", "math as m import redis import struct import numpy as np from adafruit_servokit", "r.get(name) if output == None: return default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked',", "-angle + 88 def driving_speed_signal(speed): if speed > 100: speed = 100 if", "kit.servo[frontdiff_pin].angle = front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received is", "55 if angle < -55: angle = -55 
kit.servo[steering_pin].angle = -angle + 88", "not None: current_speed = float(current_speed_received) #print(current_speed) if target_speed is None: #print(\"no driving input", "= time.time() angle_received = r.get('angle') if angle_received is None: #print(\"no steering input received\")", "steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? print(\"stopping\") driving_speed_signal(0) steering_angle(-20) time.sleep(1) steering_angle(20) time.sleep(1)", "100 + 72 driving = True in_motion_start = time.time() while driving: rear_diff_locked =", "150 throttle_stop = 72 throttle_full_forward = 180 throttle_full_reverse = 0 steering_pin = 15", "numpy as np from adafruit_servokit import ServoKit r = redis.Redis(host='localhost', port=6379, db=0) kit", "= 55 if angle < -55: angle = -55 kit.servo[steering_pin].angle = -angle +", "kit.servo[esc_pin].angle = speed * speed_cap / 100 + 72 driving = True in_motion_start", "= 180 throttle_full_reverse = 0 steering_pin = 15 esc_pin = 14 frontdiff_pin =", "3.5) voltages_received = r.get('voltages') if voltages_received is None: print(\"no battery info\") break else:", "= redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable variables def rget_and_float(name, default =", "info\") break else: voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min())", "break else: voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() < low_battery_voltage: print(voltages.min()) break", "> 2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed * 1) #print(\"driving normal", "if speed > 100: speed = 100 if speed < -72: speed =", "if voltages_received is None: print(\"no battery info\") break else: voltages = np.array(struct.unpack('%sf' %2,", "#controllable variables def rget_and_float(name, default = None): 
output = r.get(name) if output ==", "angle = 55 if angle < -55: angle = -55 kit.servo[steering_pin].angle = -angle", "steering_angle(angle): if angle > 55: angle = 55 if angle < -55: angle", "front_diff_servo_pos[front_diff_locked] low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) voltages_received = r.get('voltages') if voltages_received is None: print(\"no", "received\") driving_speed_signal(0) in_motion_start = time.time() else: target_speed = float(target_speed) if target_speed > 0:", "gear = int(rget_and_float('gear', 1)) low_battery_voltage = rget_and_float('low_battery_voltage', 3.5) #---- speed_cap = 45 #percentage", "else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? print(\"stopping\") driving_speed_signal(0) steering_angle(-20) time.sleep(1) steering_angle(20)", "r = redis.Redis(host='localhost', port=6379, db=0) kit = ServoKit(channels=16) #controllable variables def rget_and_float(name, default", "frontdiff_pin = 11 reardiff_pin = 13 gearbox_pin = 12 gear_servo_pos = [0, 60,", "and time.time() - in_motion_start > 2: driving_speed_signal(target_speed * 1.5) #print(\"driving faster\") else: driving_speed_signal(target_speed", "default else: return float(output) rear_diff_locked = int(rget_and_float('rear_diff_locked', 1)) front_diff_locked = int(rget_and_float('front_diff_locked', 1)) gear", "print(\"no battery info\") break else: voltages = np.array(struct.unpack('%sf' %2, voltages_received)) if voltages.min() <", "as m import redis import struct import numpy as np from adafruit_servokit import", "speed * speed_cap / 100 + 72 driving = True in_motion_start = time.time()", "steering input received\") steering_angle(0) else: steering_angle(float(angle_received)) r.psetex('log_driving_running', 1000, \"on\") time.sleep(0.03) # ??? print(\"stopping\")" ]
[ "rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x,", "len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line (BEGIN) lwidths = [1*np.log(1 + np.array(z))]", "locale import dill import gzip from shared import * register_matplotlib_converters() locate_set = False", "else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc,", "solid_capstyle='round', zorder=20) # PLOT: dots + thin black x, y = df[[mob_col, Rt_col]].values.T", "color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi],", "# cannot be pd.NA because used in mpl.plot df.sort_index(inplace=True) if kind == 'cases':", "# no-member (generated-members) #pylint: disable = C0302 # too-many-lines \"\"\" This code features", "'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio': ( 40, 1.014), 'Michigan':", "# set up the figure if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5))", "== 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and", "zorder=20, clip_on=False) # dots + thin black for y in [y2]: xx, yy", "figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn =", "Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10)", "PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l,", "available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) 
day_of_last_available_data = force_end edited_trajs =", "import matplotlib.ticker as tckr import matplotlib.patheffects as pthff from colorsys import rgb_to_hls from", "kind == 'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 +", "w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w, h]) fig.axes[-1].annotate('Last day:' +", "ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights = [], [] points_eur, weights_eur", "1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations =", "else 0.8) else: if optimal: front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 +", "else: el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0],", "row in df.iterrows(): location = row.name color = color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction',", "linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line (END) points = np.array([x,", "= np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit >", "2021). 
License: MIT Last changes: November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re", "1.016), 'North Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991)", "(tails are optional) finals = {} for loc in locations: im, de =", "immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"})", "1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal", "s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black',", "fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if", "pandemic shows that saving lives and protecting economy are non-trade-off objectives\" by Kochanczyk", "cutoff): front += [sd[2]] cutoff = sd[1] return front def put_final_dot(ax, location, x,", "put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color = darken(color_of(loc))", "# PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False,", "kind == 'deaths' df = df[above_min_cumul_indices] # DATA: nullify missing days to obtain", "] if cleanup else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory", "0.15), 'North Carolina': (0.10, 0.00, 0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio': (0.65,", "edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, 
interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths =", "= seg.T el = min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN,", "tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths", "ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ '", "= ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed =", "of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half of 2020", "SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15,", "(len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country", "0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5,", "1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00),", "= 6 ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0,", "locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup", "df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]],", "0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if panel_letter is not None: csv_fn", "z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths", "markersize=5.5, label=r'non-weighted $\\rho$') 
ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\")", "coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur,", "responses to COVID-19 pandemic shows that saving lives and protecting economy are non-trade-off", "'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not", "because used in mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot be pd.NA because", "(0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), } if", "+ lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi],", "Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR)", "{fig_name}: deaths will be used in place of excess deaths\") if loc not", "color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333',", "g in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs):", "f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc):", "alpha=alpha, color=color) for i in range(1, len(im)): m, ms = [('s', 1.7), ('D',", "enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <= last_avail_day panel_letter", "fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, 
scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5,", "max(excess_deaths[loc] if loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths", "missing_day in missing_days[country]: if df.index[0] <= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] =", "for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])])", "= sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending front = [ sorted_data[0][2] ]", "front def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts =", "register_matplotlib_converters import locale import dill import gzip from shared import * register_matplotlib_converters() locate_set", "optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False) def make_subplot_(ax,", "tckr import matplotlib.patheffects as pthff from colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters", "clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in", "['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs,", "i], de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha)", "sns import numpy as np import scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot", "= 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] =", "color='#666666', ha=\"center\", va=\"center\", 
clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(())", "# PLOT: variable thickness line for stretch, color in stretches: x, y =", "1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols = 6 nrows", "put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe:", "'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US English locale could not be", "0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80,", "0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None,", "10)] else: front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] -", "0.97 ), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991) } if label_shifting ==", "header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved", "if index in green_indices: if last_index_is_green is None or last_index_is_green == False: green_stretches", "markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc]", "set. 
Check tick labels in generated figures.') # -- Shared plot settings --------------------------------------------------------------------------", "loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc))", "= sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind", "ed_locations: print(f\"{loc} in figure {fig_name}: deaths will be used in place of excess", "1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return", "el = 0.15 + lwidths[0][segi] / 8 else: el = 0.10 + lwidths[0][segi]", "8/6*nrows)) for ci, country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths", "marker_size = 6 ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size,", "y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3,", "intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + slope*5000], linewidth=0.75,", "ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin", "as sns import numpy as np import scipy.stats import statsmodels.stats.weightstats as wstats import", "+ '} ' r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\",", "r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$", "ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, 
zorder=40) ax_leg.annotate(s=r'Cases per death:',", "as f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:', len(locations))", "Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory into", "clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) == 2", "python3 #pylint: disable = C, R #pylint: disable = E1101 # no-member (generated-members)", "+ ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day", "180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in", "ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction',", "left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails", "thin black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5,", "ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--',", "f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\",", "- i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1],", "in trajs[cc].index, \\ print(f\"Day {last_day} not available for {cc} that ends on\", 
trajs[cc].tail(1).index)", "t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt):", "day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) #", "= trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day)", "size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------ #", "xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes", "'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia':", "ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy() # DATA: begin each", "half of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)')", "i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--',", "f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn)", "1e6*is_USA_state(cc) elif kind == 'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] /", "if loc in spines: if loc == 'left': spine.set_position(('outward', left_shift)) elif loc ==", "10, 30, 100, 300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0,", "deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 
'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index", "put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4,", "solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory into temporally-ordered stretches df_freq =", "0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes", "plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55,", "+ str(t**2) + '}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts,", "im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color = darken(color_of(loc)) alpha =", "ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <-- THIS #de = de.set_index( de.index.shift(-days_back, freq", "ed_locations = excess_deaths.keys() points, weights = [], [] points_eur, weights_eur = [], []", "or (not optima and sd[1] > cutoff): front += [sd[2]] cutoff = sd[1]", "color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y", "low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed", "~df[col2].isnull() ] if cleanup else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval):", "color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'),", "final_day, excess_deaths, 
gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'],", "locations: if skipped and country in skipped: continue df = trajs[country] df_sel =", "header='lockdown,gdp_loss', delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved", "code features the article \"Pareto-based evaluation of national responses to COVID-19 pandemic shows", "] cutoff = sorted_data[0][1] for sd in sorted_data[1:]: if (optima and sd[1] <", "the article \"Pareto-based evaluation of national responses to COVID-19 pandemic shows that saving", "seg.T el = min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el)", "locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US English locale could not", "extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax,", "deaths = deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return", "( 0, 0.999), 'Georgia': (825, 0.991) } if label_shifting == 'A' else {}", "lwidths = [1*np.log(1 + np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1, 2) segments =", "[[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 +", "return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc)", "continue df = trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day =", "return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df = 
pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split())", "fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color,", "= False stretches = [( g, SNAIL_GREEN ) for g in green_stretches] \\", "print(f\"Saved figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name,", "0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex',", "C, R #pylint: disable = E1101 # no-member (generated-members) #pylint: disable = C0302", "2) segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) |", "thin black for y in [y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx +", "day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn =", "linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes", "df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z = 0.7*np.log(0 +", "(0.10, 0.00, 0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania':", "4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy() # DATA:", "day ax.annotate(str('Date:' if show_corr_history else 'Last day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\",", "half of 2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights", "= [__ for _ 
in fronts for __ in _] finals_remaining = [(*im_de,", "1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00),", "('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col", "g, SNAIL_GREEN ) for g in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng", "y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments):", "m, ms = [('s', 1.7), ('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 -", "y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments):", "sorted_data[1:]: if (optima and sd[1] < cutoff) or (not optima and sd[1] >", "markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax,", "alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40)", "if kind == 'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction',", "0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert", "half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value,", "/ population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths': if cc in low_mortality_locations: return", "[1*np.log(1 + np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]],", "& Lipniacki (Scientific Reports, 2021). 
License: MIT Last changes: November 09, 2020 \"\"\"", "'Florida': ( 0, 0.987), 'Illinois': ( 90, 1.016), 'North Carolina': (-10, 0.97 ),", "clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0,", "coefficients values = np.array(list(finals.values())) weights = np.array([population(loc) for loc in finals.keys()]) rho, wrho", "alpha=0.8, linewidth=1.1 if optimal else 0.8) else: if optimal: front_coords = [[front_coords[0][0] +", "fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() +", "= df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <-- THIS", "trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x, y,", "__ in _] finals_remaining = [(*im_de, loc) for loc, im_de in finals.items() if", "scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color =", "set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1]", "alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths': # ==--- days_back = 14 x,", "0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20,", "fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\",", "{fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] =", "f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if panel_letter", "0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths',", "saving lives and protecting economy are non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific", "country in colors.keys(): return colors[country] else: return dull_color def correlations(values, weights): rho =", "csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths',", "+ f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ =", "= sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal))) # TMP: was 0.15+0.1* front_coords", "(im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) #", "plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 0.5 
plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad']", "0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5", "dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia':", "in [y1, y2, y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy,", "'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf", "\" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$", "[], [] last_index_is_green = None for index, value in df.iterrows(): if index in", "of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in", "seg in enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] -", "# colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def", "= correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ =", "[], [] for i in range(9): points, weights = [], [] for loc", "ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i = ci//ncols if row_i", "y1, y2 = [], [], [] for i in range(9): points, weights =", "[ sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd in sorted_data[1:]: if (optima and", "= True elif index in nongreen_indices: if last_index_is_green is None or last_index_is_green ==", "), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio': ( 40, 1.014),", "available for {cc} that ends on\", trajs[cc].tail(1).index) return 
trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) +", "color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)),", "deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii", "ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$' for t in ticks[1:]]) def", "Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65),", "80, 0.92 ), 'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018), 'Florida': ( 0,", "'-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location))", "{rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}", "f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths,", "= -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval == 'monthly': immobi", "'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018), 'Florida': ( 0, 0.987), 'Illinois': (", "de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo)", "not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths']", "1.7), ('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 - i], de[-1 - i],", 
"nongreen_stretches = [], [] last_index_is_green = None for index, value in df.iterrows(): if", "continue # PLOT: X-axis row_i = ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1)", "SNAIL_GREEN ) for g in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng in", "10000) z = 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z,", "thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15)", "s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40)", "0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--',", "color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's", "y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx +", "va=\"center\") for vi, v in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] +", "[3, 10, 30, 100, 300, 1000, 3000, 10000] x = np.array(list(range(len(z)))) y1 =", "locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal):", "'cases': el = 0.15 + lwidths[0][segi] / 8 else: el = 0.10 +", "'-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths': # ==--- days_back", "== 'cases': assert last_day in trajs[cc].index, \\ print(f\"Day {last_day} not available for {cc}", "put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30, 100, 300, 1000, 3000, 10000] x", "(940, 1.0 ), 'Norway': ( 20, 0.88 ), 'South Korea': ( 52, 0.59", "'deaths': # ==--- days_back 
= 14 x, y = df[[mob_col, Rt_col]].values.T points =", "loc in finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction',", "ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4))", "and ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths':", "try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX')", "trajs_trimmed[country].copy() # DATA: begin each trajectory since 100 cumulative cases min_cumul = 100", "'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France':", "if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df", "image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return", "in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points", "too-many-lines \"\"\" This code features the article \"Pareto-based evaluation of national responses to", "make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights = [], [] points_eur, weights_eur = [],", "extra_shift = -0.08 if v in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1],", "gdp_2020h1 = dill.load(f) print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = 
plot_cumulative_immobilization_and_deaths(trajectories, locations,", "Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5", "by_per_capita(cc): if kind == 'cases': assert last_day in trajs[cc].index, \\ print(f\"Day {last_day} not", "even if kind == 'deaths' df = df[above_min_cumul_indices] # DATA: nullify missing days", "True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX')", "= np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0,", "slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = [] for _, row in df.iterrows():", "label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x + 65 - (0 if location not", "de.index.shift(-days_back, freq ='D') ) # <-- not this z = de.join(ca) z['cases14_per_death14'] =", "0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00),", "= jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <= last_avail_day panel_letter = chr(ord('A') +", "= plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 = plot_gdp_drop_and_excess_deaths(trajectories, locations, jul01, excess_deaths, gdp_2020h1,", "= scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color", "+= [index] last_index_is_green = True elif index in nongreen_indices: if last_index_is_green is None", "# DATA: begin each trajectory since 100 cumulative cases min_cumul = 100 above_min_cumul_indices", "1000) np.place(z, z < 0, 0) lwidths = [z] for segi, seg in", "xy=(0.0, 0.97), 
xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15,", "for sd in sorted_data[1:]: if (optima and sd[1] < cutoff) or (not optima", "'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina':", "= [] for i in range(n_fronts): fronts_locations = [__ for _ in fronts", "ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi]", "# PLOT: legend for ax in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if", "$\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center',", "= 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8,", "November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from operator import itemgetter from", "elif last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green = False stretches = [(", "in colors.keys(): return colors[country] else: return dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0],", "ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction',", "italic: loc = r'\\textit{' + loc + r'}' if label_shifting == 'A': ax.annotate(loc,", "True: if green_stretches: green_stretches[-1] += [index] # extra point for smooth joins nongreen_stretches", "0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20,", "co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif", "df.set_index('location') for loc in 
locations: if not loc in gdp_2020h1: print(f\"{loc}: missing GDP", "[] for i in range(n_fronts): fronts_locations = [__ for _ in fronts for", "gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values,", "= None for index, value in df.iterrows(): if index in green_indices: if last_index_is_green", "interval == 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df =", "== True: green_stretches[-1] += [index] last_index_is_green = True elif index in nongreen_indices: if", "kind == 'deaths': # ==--- days_back = 14 x, y = df[[mob_col, Rt_col]].values.T", "make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative", "as tckr import matplotlib.patheffects as pthff from colorsys import rgb_to_hls from pandas.plotting import", "colors.keys(): return colors[country] else: return dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0]", "'Greece': (0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland':", "= 3.5 diameter = np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x], [y], '-.',", "locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04),", "data\") continue is_in_Europe = not loc in STATE_TO_ABBREV and not loc in ['Canada',", "'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\",", "r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") 
ax_leg.annotate(text=r'no data", "0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x,", "<-- THIS #de = de.set_index( de.index.shift(-days_back, freq ='D') ) # <-- not this", "= \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ =", "fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 = plot_gdp_drop_and_excess_deaths(trajectories, locations, jul01, excess_deaths,", "COVID-19 pandemic shows that saving lives and protecting economy are non-trade-off objectives\" by", "fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif", "'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90,", "points[1:]], axis=1) for segi, seg in enumerate(segments): seg = seg.T el = 0.1", "0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60),", "# export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image", "loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc)", "weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted)", "marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else:", "= location.replace('United Kingdom', 'UK') if italic: loc = r'\\textit{' + loc + r'}'", "len(df_freq) == len(df) green_indices = df[df_freq < 
thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index", "low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] /", "if location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data',", "de = df[['new_deaths14']] ca = df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D')", "x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx", "deaths = deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return", "cols = ['mobility', 'new_deaths'] # set up the figure if show_corr_history: fig, axes", "= [] for _, row in df.iterrows(): location = row.name color = color_of(location)", "{fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30, 100, 300,", "weights_eur = [], [] for loc in locations: if population(loc) < MIN_POPULATION_M or", "spines: if loc == 'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift))", "Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col],", "vi, v in enumerate(z): for y in [y1, y2]: extra_shift = -0.08 if", "[(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches = sorted(stretches,", "Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90, 0.70, 0.00), 'Canada':", "ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8,", "_ = 
plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations): ax =", "df[['new_deaths14']] ca = df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D') ) #", "= trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else df", "variable thickness line for stretch, color in stretches: x, y = df.loc[stretch, [mob_col,", "points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de =", "cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in locations: if not loc in", "cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ '", "\\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0,", "alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness line (BEGIN) lwidths = [0.7", "[1*np.log(1 + z)] for segi, seg in enumerate(segments): seg = seg.T if kind", "plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts,", "cases min_cumul = 100 above_min_cumul_indices = df['total_cases'] >= min_cumul # cases even if", "colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale import dill import gzip", "* 3 light_color = color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o',", "len(im)): m, ms = [('s', 1.7), ('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1", "and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be pd.NA because used", "'mobility_reduction': 
f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = [i.replace(day=1) for i in", "'South Korea': ( 52, 0.59 ), 'Portugal': ( 0, 0.97 ), 'Bulgaria': (830,", "# PLOT: X-axis row_i = ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100,", "axis=1) for segi, seg in enumerate(segments): seg = seg.T el = 0.1 +", "seg.T el = 0.1 + (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025,", "+ 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations,", "values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn)", "stretch, color in stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x,", "be set. Check tick labels in generated figures.') # -- Shared plot settings", "scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines, left_shift=15,", "cols, force_end=final_day) assert final_day <= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes,", "and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False)", "last_index_is_green is None or last_index_is_green == True: if green_stretches: green_stretches[-1] += [index] #", "h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR)", "(0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00,", "figsize=(11, 5)) for i, fday in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, 
locations, cols,", "in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations if", "front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal))) #", "10000), 10000) z = 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000)", "+ 0.04*(max(0, front_i - 1*optimal))) # TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for", "in df.iterrows(): if index in green_indices: if last_index_is_green is None or last_index_is_green ==", "300, 1000, 3000, 10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31", "min_cumul = 100 above_min_cumul_indices = df['total_cases'] >= min_cumul # cases even if kind", "ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi,", "0.999), 'Georgia': (825, 0.991) } if label_shifting == 'A' else {} if show_population_halo:", "**ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices", "in locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\")", "fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d,", "print('Warning: US English locale could not be set. 
Check tick labels in generated", "(0.10, 0.80, 0.00), 'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95,", "np.array([population(loc) for loc in finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97),", "list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False,", "clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT:", "0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x,", "correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \"", "[index] # extra point for smooth joins nongreen_stretches += [ [index] ] elif", "0.88 ), 'South Korea': ( 52, 0.59 ), 'Portugal': ( 0, 0.97 ),", "0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for", "row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) #", "'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if loc in excess_deaths else 0,", "$\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$", "fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M =", "& '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs = {} 
assert len(cols)", "and protecting economy are non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific Reports, 2021).", "export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return", "+= [sd[2]] cutoff = sd[1] return front def put_final_dot(ax, location, x, y, is_extra_country=False,", "enumerate(segments): seg = seg.T if kind == 'cases': el = 0.15 + lwidths[0][segi]", "# export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\"", "ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total =", "(0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50),", "14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20)", "0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania':", "figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories,", "ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories,", "wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$", "= C0302 # too-many-lines \"\"\" This code features the article \"Pareto-based evaluation of", "locations = sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols = 6 nrows =", "0) lwidths = [1*np.log(1 + z)] for segi, seg 
in enumerate(segments): seg =", "b - 0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0,", "lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data,", "i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color, alpha=0.8,", "(0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan':", "ax.tick_params(which='minor', length=1.) def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color,", "color in stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1,", "immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = [i.replace(day=1) for i", "foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading =", "= plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half", "country in locations: if skipped and country in skipped: continue df = trajs[country]", "operator import itemgetter from multiprocessing import Pool import pandas as pd import seaborn", "0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25,", "panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter)", "immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval == 'monthly':", "'.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) 
day_of_last_available_data = force_end edited_trajs = {} assert len(cols) ==", "6 ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8,", "180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal", "zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop", "import * register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set =", "y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0))", "(0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00,", "export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image as", "xycoords='data', xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations,", "evaluation of national responses to COVID-19 pandemic shows that saving lives and protecting", "5000], [intercept, intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = [] for", "locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 =", "1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # --", "= df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country,", "markersize=diameter, markeredgewidth=0, alpha=0.2, 
clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y], '-.',", "vi, v in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05", "label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y", "[index] ] elif last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green = True elif", "xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols,", "lwidths[0][segi] / 8 else: el = 0.10 + lwidths[0][segi] / 14 co =", "ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1, y2 = [], [], []", "0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1))", "False: nongreen_stretches[-1] += [index] last_index_is_green = False stretches = [( g, SNAIL_GREEN )", "case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M)", "from multiprocessing import Pool import pandas as pd import seaborn as sns import", "= list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) +", "l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots +", "adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax", "= extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig, ax = plt.subplots(figsize=(5, 5))", "axes[1].inset_axes([0.92, 
0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction',", "fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in enumerate(final_day): last_avail_day, trajs", "excess_deaths.keys() points, weights = [], [] points_eur, weights_eur = [], [] for loc", "$\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates", "1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments): seg", "0.15 + lwidths[0][segi] / 8 else: el = 0.10 + lwidths[0][segi] / 14", "line for stretch, color in stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points", "panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2,", "sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols = 6 nrows = (len(locations))//ncols +", "fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} ' r'new cases", "0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20,", "= '#f8f6f4' ncols = 6 nrows = (len(locations))//ncols + 1 fig, _ =", "points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho)", "5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half", "= fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations if kind == 'deaths'", "= [0.7 * (0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2) segments", 
"if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue if", "kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not", "last_index_is_green = True elif index in nongreen_indices: if last_index_is_green is None or last_index_is_green", "= color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom',", "y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) #", "the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020", "= [], [] for loc in locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia':", "ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind ==", "plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''')", "0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63),", "+ 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in", "plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT =", "in missing_days[country]: if df.index[0] <= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan", "0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 
0.25,", "loc in locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure", "show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories,", "= np.array([population(loc) for loc in finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0,", "= 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z <", "ax.annotate(str('Date:' if show_corr_history else 'Last day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0,", "(generated-members) #pylint: disable = C0302 # too-many-lines \"\"\" This code features the article", "0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45,", "['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot", "York': (0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas':", "y2]: extra_shift = -0.08 if v in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000',", "+ 1e6*is_USA_state(cc) elif kind == 'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"]", "days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is None: if", "plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st}", "linestyle='--', linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy() # DATA: begin each trajectory", ") # <-- THIS #de = de.set_index( 
de.index.shift(-days_back, freq ='D') ) # <--", "in ed_locations: print(f\"{loc} in figure {fig_name}: deaths will be used in place of", "+ i)/360*2*3.14159)] for i in range(0, 91, 10)] else: front_coords = [[front_coords[0][0] -", "$\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted", "rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's", "= 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for i in range(1, len(im)):", "def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5))", "locations: if not loc in gdp_2020h1: print(f\"{loc}: missing GDP data in figure {fig_name}\")", "ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week /M', xy=(0.5, 0.22), xycoords='axes", "wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items(): if loc", "if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor',", "xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left',", "for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6,", "assert len(cols) == 2 for country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] =", "shared import * register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set", "( 90, 1.016), 'North Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0, 
0.999), 'Georgia':", "= plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories,", "zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3, 10, 30, 100, 300] x", "def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df =", "markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y], '-.', marker='8'", "last_index_is_green = None for index, value in df.iterrows(): if index in green_indices: if", "return dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1]", "PLOT: deaths in low-mortality locations if kind == 'deaths' and country in low_mortality_locations:", "np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca = df[['new_cases14' ]] ca = ca.set_index(", "matplotlib.ticker as tckr import matplotlib.patheffects as pthff from colorsys import rgb_to_hls from pandas.plotting", "coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image as PDF", "+ f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn", "trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <= last_avail_day panel_letter = chr(ord('A')", "va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} ' r'new cases /week /M',", "color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR)", "nongreen_indices: if last_index_is_green is None or 
last_index_is_green == True: if green_stretches: green_stretches[-1] +=", "seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black", "fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} ' r'new", "df['total_cases'] >= min_cumul # cases even if kind == 'deaths' df = df[above_min_cumul_indices]", "n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last day ax.annotate(str('Date:' if show_corr_history", "alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1])", "darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for i in", "axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z", "locate_set = True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except:", "else: if optimal: front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0]", "def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho,", "1*optimal))) # TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for loc in front]).T if", "print(f\"{loc} skipped in figure {fig_name} because of missing GDP data\") continue is_in_Europe =", "fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30, 100, 300, 1000, 3000,", "| (tests_per_hit > 10000), 10000) z = 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0)", "= np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments): seg = seg.T el", 
"ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in [y1, y2]: extra_shift", "sd[1] return front def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False):", "xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes fraction',", "(0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95), 'New", "if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc =", "# export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file", "pandas as pd import seaborn as sns import numpy as np import scipy.stats", "not in ed_locations: print(f\"{loc} in figure {fig_name}: deaths will be used in place", "= dill.load(f) print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01,", "else label_shifts[location][0]), y**0.9999 * (1 if location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location),", "locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf", "= trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5,", "fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w,", "each trajectory since 100 cumulative cases min_cumul = 100 above_min_cumul_indices = df['total_cases'] >=", "'New York': (0.60, 0.30, 0.00), 
'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00),", "~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data)", "annotation: last day ax.annotate(str('Date:' if show_corr_history else 'Last day:') + \\ f\" {final_day.strftime('%B", "color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories,", "for missing_day in missing_days[country]: if df.index[0] <= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col]", "rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax,", "y2 = [], [], [] for i in range(9): points, weights = [],", "in fronts for __ in _] finals_remaining = [(*im_de, loc) for loc, im_de", "fronts_locations = [__ for _ in fronts for __ in _] finals_remaining =", "wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for", "1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'],", "if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return", "excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10)", "1)) # variable thickness line (BEGIN) lwidths = [1*np.log(1 + np.array(z))] points =", "last_index_is_green is None or last_index_is_green == False: green_stretches += [ [index] ] elif", "== False: nongreen_stretches[-1] += [index] last_index_is_green = False stretches = [( g, SNAIL_GREEN", "= 
np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca = df[['new_cases14' ]] ca =", "color = color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United", "thin black for y in [y1, y2, y3]: xx, yy = x[:-1], y[:-1]", "ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df =", "8 else: el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el)", "deaths will be used in place of excess deaths\") if loc not in", "+ thin black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000',", "l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes", "= f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image as PDF fig.tight_layout() fn", "Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's", "'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia':", "'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60,", "fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in locations:", "0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10)", "$\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) #", "= 
immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths':", "location.replace('United Kingdom', 'UK') if italic: loc = r'\\textit{' + loc + r'}' if", "ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7)", "zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3),", "figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt):", "= max(excess_deaths[loc] if loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc],", "'South Korea'] deaths = max(excess_deaths[loc] if loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x,", "0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75),", "= ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa',", "= f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF fig.tight_layout() fn", "(0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75,", "if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size", "locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left',", "\\ print(f\"Day 
{last_day} not available for {cc} that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day,", "is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points),", "kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor),", "for ax in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind == 'cases':", "plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9),", "xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom']", "rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data =", "y3]: xx = [float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx,", "plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00),", "Carolina': (0.10, 0.00, 0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00),", "1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\",", "fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}',", "np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line (BEGIN)", "if loc in excess_deaths else 
0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths /", "+ 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)]", "y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark': (940, 1.0 ),", "zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind", "# DATA: partition trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ /", "{wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn,", "- 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable", "= np.nan # cannot be pd.NA because used in mpl.plot df.sort_index(inplace=True) if kind", "1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 - i], de[-1 - i], '.', marker=m,", "+ sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$' for t", "(ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs", "np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [z]", "np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line for y in", "is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark': (940, 1.0 ), 'Norway':", "PLOT: legend for ax in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind", "print(f\"{loc} in figure {fig_name}: deaths will be used 
in place of excess deaths\")", "== '__main__': with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1", "as plt import matplotlib.dates as dts import matplotlib.ticker as tckr import matplotlib.patheffects as", "fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction", "front_coords = np.array([finals[loc] for loc in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':'", "f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = [i.replace(day=1) for i in ii] return df", "markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True)", "in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M)", "xy=(x + 65 - (0 if location not in label_shifts else label_shifts[location][0]), y**0.9999", "0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness", "disable = C, R #pylint: disable = E1101 # no-member (generated-members) #pylint: disable", "v in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5,", "(0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70),", "alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\",", "ymax)) ticks = list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' +", "zorder=10) # variable thickness line (BEGIN) lwidths = [0.7 * (0 + np.log(z))]", "clip_on=False, zorder=100) ax.annotate(s=\"(plot not 
shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\",", "0.50, 0.25), } if country in colors.keys(): return colors[country] else: return dull_color def", "values, values_eur = np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur, wrho_eur =", "in [y1, y2]: extra_shift = -0.08 if v in [100, 300, 1000] else", "h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w, h])", "def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64')", "else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in spines:", "alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y in [y2]:", "manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN,", "= True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME,", "be pd.NA because used in mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot be", "xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',')", "= axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes", "if location not in label_shifts else label_shifts[location][0]), y**0.9999 * (1 if location not", "zorder=5) weights = [] for _, row in df.iterrows(): location = row.name color", "= sorted(stretches, key=by_first_day) # PLOT: variable thickness line for stretch, color in 
stretches:", "& ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data =", "= 6 nrows = (len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols,", "ii = df.index df.index = [i.replace(day=1) for i in ii] return df elif", "interval == 'daily': immobi = immobi.cumsum() deaths = deaths.cumsum() / ppl df =", "if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last", "0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [z] for", "z < 0, 0) lwidths = [z] for segi, seg in enumerate(segments): seg", "of 2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights =", "import scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt import matplotlib.dates as", "clip_on=False) # dots + thin black for y in [y2]: xx, yy =", "+ 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None,", "('p', 2.2)][i % 3] ax.plot(im[-1 - i], de[-1 - i], '.', marker=m, markersize=ms,", "return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185,", "# x-ascending front = [ sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd in", "population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue if loc", "PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def", "print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is None: if verbose: print(f\"Last shared", "else 'o', linewidth=1, markersize=marker_size, 
markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom',", "f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax", "def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3))", "0.80, 0.00), 'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40,", "'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95),", "scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def", "y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho,", "header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] # set up the figure if show_corr_history:", "linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of 2020')", "= [3, 10, 30, 100, 300, 1000, 3000, 10000] x = np.array(list(range(len(z)))) y1", "line (END) points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1)", "weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur, wrho_eur", "= immobi.cumsum() deaths = deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths':", "# variable thickness line (BEGIN) lwidths = [0.7 * (0 + np.log(z))] points", "locate_set: print('Warning: US English locale could not be set. 
Check tick labels in", "# variable thickness line (END) points = np.array([x, y2]).T.reshape(-1, 1, 2) segments =", "[100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data',", "edited_trajs = {} assert len(cols) == 2 for country in locations: df =", "0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio': ( 40,", "marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1.,", "else {} if show_population_halo: marker_size = 3.5 diameter = np.sqrt(population(location)) * 3 light_color", "\" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \"", "locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8')", "0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70,", "0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0),", "linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = [] for _, row in df.iterrows(): location", "italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur =", "'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50,", "= f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',')", "+ f\"{rho:.2f}\", 
xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" +", "matplotlib.patheffects as pthff from colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale", "the 1\\textsuperscript{st} half of 2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys()", "show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False,", "sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind ==", "if kind == 'cases': el = 0.15 + lwidths[0][segi] / 8 else: el", "elif interval == 'daily': immobi = immobi.cumsum() deaths = deaths.cumsum() / ppl df", "ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$' for", "(0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), } if country in colors.keys(): return", "= immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily': immobi", "(0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05,", "# extra point for smooth joins nongreen_stretches += [ [index] ] elif last_index_is_green", "df.at[missing_day,mob_col] = np.nan # cannot be pd.NA because used in mpl.plot df.at[missing_day, Rt_col]", "dots + thin black for y in [y2]: xx, yy = x[:-1], y[:-1]", "as pd import seaborn as sns import numpy as np import scipy.stats import", "ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, 
ymax=40,", "= np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg", "ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) #", "cols, force_end=fday) assert fday <= last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs,", "adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) #", "in finals.items() if loc not in fronts_locations and loc not in OUT_OF_FRONT] front", "(0.35, 0.35, 0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00),", "else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6", "Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are optional) finals = {} for", "if last_index_is_green is None or last_index_is_green == False: green_stretches += [ [index] ]", "make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax + sep, sep)) ax.set_yticks(ticks)", "locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1],", "if panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols", "freq ='D') ) # <-- not this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14']", "1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half", "= immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': 
f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0,", "last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind", "clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] /", "va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom'])", "fraction', color=ANNOT_COLOR) # export coordinates if panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\"", "(not optima and sd[1] > cutoff): front += [sd[2]] cutoff = sd[1] return", "fday in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <=", "scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in", "left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items(): if loc in spines: if loc", "disable = C0302 # too-many-lines \"\"\" This code features the article \"Pareto-based evaluation", "color = color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8,", "article \"Pareto-based evaluation of national responses to COVID-19 pandemic shows that saving lives", "30, 100, 300, 1000, 3000, 10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2", "-------------------------------------------------------------------------------------------------- import re from operator import itemgetter from multiprocessing import Pool import pandas", "'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00),", 
"ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least", "x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30)", "<= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False,", "in fronts_locations and loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for", "enumerate(segments): seg = seg.T el = 0.1 + (lwidths[0][segi] - 0.)/14 color =", "(830, 0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio': ( 40, 1.014), 'Michigan': (800,", "front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)]", "'#f8f6f4' ncols = 6 nrows = (len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows,", "= '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors", "for country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull()", "'} ' r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\")", "180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in", "= np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line (BEGIN) lwidths =", "not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x +", "de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7", "export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file 
{fn}.\")", "day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>',", "[[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 +", "0.92 ), 'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018), 'Florida': ( 0, 0.987),", "(lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) #", "df.iterrows(): if index in green_indices: if last_index_is_green is None or last_index_is_green == False:", "+= [ [index] ] elif last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green =", "interval == 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df =", "df.loc[loc] = [immob, gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10)", "f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn =", "y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33,", "figure if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in", "x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark': (940, 1.0", "== 'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if", "v in enumerate(z): for y in [y1, y2]: extra_shift = -0.08 if v", "'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else:", "key=itemgetter(0, 1), reverse=not optima) # 
x-ascending front = [ sorted_data[0][2] ] cutoff =", "x-ascending front = [ sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd in sorted_data[1:]:", "-- Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada',", "darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for", "de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1],", "cumulative cases min_cumul = 100 above_min_cumul_indices = df['total_cases'] >= min_cumul # cases even", "xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7)", "ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False,", "ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5):", "spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax):", "ax.set_ylim((0, ymax)) ticks = list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{'", "trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor =", "= df[['new_deaths14']] ca = df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D') )", "$\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted", "because of missing GDP data\") continue is_in_Europe = not loc in STATE_TO_ABBREV and", "de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts:", "loc == 'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none')", "__name__ == '__main__': with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day, missing_days, excess_deaths,", "(Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\"", "fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in [y1, y2]:", "0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00,", "len(cols) == 2 for country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[", "linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction',", "no-member 
(generated-members) #pylint: disable = C0302 # too-many-lines \"\"\" This code features the", "generated figures.') # -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] =", "df = trajs_trimmed[country].copy() # DATA: begin each trajectory since 100 cumulative cases min_cumul", "'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return", "if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is None: if verbose:", "np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line", "fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01,", "clip_on=False) x, y1, y2 = [], [], [] for i in range(9): points,", "PLOT: X-axis row_i = ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0))", "+ \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation:", "key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols = 6 nrows = (len(locations))//ncols + 1", "C0302 # too-many-lines \"\"\" This code features the article \"Pareto-based evaluation of national", "cases even if kind == 'deaths' df = df[above_min_cumul_indices] # DATA: nullify missing", "\\ + [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches", "annotation: correlation coefficients values = np.array(list(finals.values())) weights = np.array([population(loc) for loc in finals.keys()])", "l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], 
alpha=1, solid_capstyle='butt', zorder=20,", "enumerate(z): for y in [y1, y2]: extra_shift = -0.08 if v in [100,", "color=ANNOT_COLOR) # annotation: correlation coefficients values = np.array(list(finals.values())) weights = np.array([population(loc) for loc", "import Pool import pandas as pd import seaborn as sns import numpy as", "xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom']", "sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots", "col1, col2 = cols days_of_last_available_data = set() for country in locations: if skipped", "immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily': immobi =", "immobi.cumsum() deaths = deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"})", "xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes", "xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z", "seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line (END)", "np.place(z, z < 0, 0) lwidths = [z] for segi, seg in enumerate(segments):", "alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y in [y1,", "path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) 
set_ticks_lengths(ax) # PLOT:", "solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y in [y1, y2,", "False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8')", "for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color,", "intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = [] for _, row", "f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction',", "weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" +", "light_color = color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter,", "label_shifting='A', italic=False): label_shifts = { 'Denmark': (940, 1.0 ), 'Norway': ( 20, 0.88", "np.nan # cannot be pd.NA because used in mpl.plot df.sort_index(inplace=True) if kind ==", "for vi, v in enumerate(z): for y in [y1, y2]: extra_shift = -0.08", "n_fronts, optimal): fronts = [] for i in range(n_fronts): fronts_locations = [__ for", "if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose:", "y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True,", "with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f)", "lwidths = [0.7 * (0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2)", "kind == 'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9,", 
"0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss',", "de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc)", "green_indices: if last_index_is_green is None or last_index_is_green == False: green_stretches += [ [index]", "/ z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z", "dill import gzip from shared import * register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME,", "/week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' +", "fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = [] for i in", "{fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction", "markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$')", "{} if show_population_halo: marker_size = 3.5 diameter = np.sqrt(population(location)) * 3 light_color =", "DATA: partition trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country)", "[1, 3, 10, 30, 100, 300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1,", "df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = [i.replace(day=1)", "markeredgewidth=0, alpha=0.8, 
clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if italic: loc", "0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10,", "= plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations,", "optimal else 0.8) else: if optimal: front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180", "xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155,", "deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif", "im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points)", "Kochanczyk & Lipniacki (Scientific Reports, 2021). 
License: MIT Last changes: November 09, 2020", "+ r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x + 65 - (0", "in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13,", "ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x,", "file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig,", "and loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front", "DATA: begin each trajectory since 100 cumulative cases min_cumul = 100 above_min_cumul_indices =", "in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ] if", "el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1],", "spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom')", "color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if italic: loc = r'\\textit{' +", "), 'Norway': ( 20, 0.88 ), 'South Korea': ( 52, 0.59 ), 'Portugal':", "'Last day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR)", "xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67),", "continue is_in_Europe = not loc in STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan',", "f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, 
header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') #", "force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs = {} assert len(cols) == 2 for country", "force_end=fday) assert fday <= last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs, locations,", "ca = df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <--", "np import scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt import matplotlib.dates", "= min(days_of_last_available_data) if force_end is None: if verbose: print(f\"Last shared available day ({'", "FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be pd.NA because used in mpl.plot df.at[missing_day,", "= wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc,", "0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left',", "correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \"", "'Peru': (0.75, 0.50, 0.25), } if country in colors.keys(): return colors[country] else: return", "%Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file", "= '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium':", "'A' else {} if show_population_halo: marker_size = 3.5 diameter = np.sqrt(population(location)) * 3", "= [1*np.log(1 + z)] for segi, seg in enumerate(segments): seg = seg.T if", "nullify 
missing days to obtain visual discontinuities for missing_day in missing_days[country]: if df.index[0]", "not available for {cc} that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc)", "pd.NA because used in mpl.plot df.sort_index(inplace=True) if kind == 'cases': # ==--- #", "in figure {fig_name} because of missing GDP data\") continue is_in_Europe = not loc", "'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def", "loc in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if loc in", "as np import scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt import", "0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018),", "df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d'))", "0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y", "if optimal: front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] +", "Last changes: November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from operator import", "in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1", "def by_per_capita(cc): if kind == 'cases': assert last_day in trajs[cc].index, \\ print(f\"Day {last_day}", "='D') ) # <-- not this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] /", "2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err =", "5)) for i, fday in enumerate(final_day): last_avail_day, trajs = 
jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday)", "# export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\")", "df elif interval == 'daily': immobi = immobi.cumsum() deaths = deaths.cumsum() / ppl", "if skipped and country in skipped: continue df = trajs[country] df_sel = df[", "ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else", "= 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-',", "color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes", "if kind == 'deaths' df = df[above_min_cumul_indices] # DATA: nullify missing days to", "0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"]", "seg in enumerate(segments): seg = seg.T el = 0.1 + (lwidths[0][segi] - 0.)/14", "to COVID-19 pandemic shows that saving lives and protecting economy are non-trade-off objectives\"", "lives and protecting economy are non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific Reports,", "points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi,", "f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:', len(locations)) jul01", "trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths': if cc in", "sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending front = [ sorted_data[0][2]", "xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black', 
alpha=0.5,", "since 100 cumulative cases min_cumul = 100 above_min_cumul_indices = df['total_cases'] >= min_cumul #", "ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\",", "0.59 ), 'Portugal': ( 0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80,", "= force_end edited_trajs = {} assert len(cols) == 2 for country in locations:", "({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs = {} assert", "weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def", "spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in", "+ 65 - (0 if location not in label_shifts else label_shifts[location][0]), y**0.9999 *", "assert kind in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New", "color=ANNOT_COLOR) # export coordinates if panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn,", "Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind == 'cases': assert last_day in trajs[cc].index,", "'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction':", "used in mpl.plot df.sort_index(inplace=True) if kind == 'cases': # ==--- # PLOT: pink", "mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red", "0.35, 0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy': 
plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway':", "gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in locations: if not loc in gdp_2020h1:", "trajs[cc].index, \\ print(f\"Day {last_day} not available for {cc} that ends on\", trajs[cc].tail(1).index) return", "or last_index_is_green == True: if green_stretches: green_stretches[-1] += [index] # extra point for", "for {cc} that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc)", "country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False,", "+ 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5,", "not in fronts_locations and loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front)", "(0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30,", "f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks =", "fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3, 10, 30,", "kind in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand']", "df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in locations: if", "== 'cases': el = 0.15 + lwidths[0][segi] / 8 else: el = 0.10", "in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"]", "np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = 
np.concatenate([points[:-1], points[1:]], axis=1) for", "== 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w,", "0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65,", "0.04*(max(0, front_i - 1*optimal))) # TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for loc", "solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line (END) points = np.array([x, y2]).T.reshape(-1, 1,", "correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR)", "= deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df", "True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US English locale", "coordinates if panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',')", "-- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] =", "[r'$\\sqrt{' + str(t**2) + '}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day,", "show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im,", "show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4", "np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = 
np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1", "ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin", "gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations", "for segi, seg in enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 +", "de, '-', linewidth=0.8, alpha=alpha, color=color) for i in range(1, len(im)): m, ms =", "# DATA df = trajs_trimmed[country].copy() # DATA: begin each trajectory since 100 cumulative", "0.00, 0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20,", "seaborn as sns import numpy as np import scipy.stats import statsmodels.stats.weightstats as wstats", "300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75,", "English locale could not be set. 
Check tick labels in generated figures.') #", "show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False,", "multiprocessing import Pool import pandas as pd import seaborn as sns import numpy", "wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$", "clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when", "0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45, 0.75, 1.00),", "+ 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i", "= df.set_index('location') for loc in locations: if not loc in gdp_2020h1: print(f\"{loc}: missing", "ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations if kind ==", "by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT: variable thickness line for", "\\textbf{at least 1} new death /week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\",", "line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition", "z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z <", "0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co,", "} if label_shifting == 'A' else {} if show_population_halo: marker_size = 3.5 diameter", "show_population_halo: marker_size = 3.5 diameter = 
np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x],", "is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size =", "weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho, wrho", "ca = ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <-- THIS #de = de.set_index(", "PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig if", "or last_index_is_green == False: green_stretches += [ [index] ] elif last_index_is_green == True:", "zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR)", "Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA", "= \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export", "'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991) } if label_shifting == 'A' else", "def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5): lightness = min(1,", "ax.plot(im[-1 - i], de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9),", "gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop]", "'daily': immobi = immobi.cumsum() deaths = deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction':", "((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1,", "= ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0))", "[jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False,", "ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666',", "ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs =", "fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction',", "y in [y1, y2, y3]: xx = [float(x[0]) + 0.125] + list(x[1:-1]) +", "df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax + sep,", "('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 - i], de[-1 - i], '.',", "'-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color)", "0.45, 0.80), 'Florida': (0.95, 
0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50,", "fraction', clip_on=False) x, y1, y2 = [], [], [] for i in range(9):", "* (1 if location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else:", "ng in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT:", "= f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day,", "'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\"", "import seaborn as sns import numpy as np import scipy.stats import statsmodels.stats.weightstats as", "fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes", "[z] for segi, seg in enumerate(segments): seg = seg.T if kind == 'cases':", "f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily': immobi = immobi.cumsum() deaths", "'-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for", "color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y], '-.', marker='8' if is_extra_country else", "( 20, 0.88 ), 'South Korea': ( 52, 0.59 ), 'Portugal': ( 0,", "len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches", "fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5,", "= [1*np.log(1 + np.array(z))] points = np.array([x, 
y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1],", "if df.index[0] <= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot", "trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666',", "zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$", "days to obtain visual discontinuities for missing_day in missing_days[country]: if df.index[0] <= missing_day", "linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots + thin black x, y =", "[y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location),", "' + str(thr_weekly_cases_per_1M) + '} ' r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes", "fronts for __ in _] finals_remaining = [(*im_de, loc) for loc, im_de in", "def by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT: variable thickness line", "STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc]", "else 'Last day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction',", "foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend", "PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100,", "wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$", "linestyle='-', linewidth=0.75, alpha=1, 
solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory into temporally-ordered", "for y in [y1, y2, y3]: xx = [float(x[0]) + 0.125] + list(x[1:-1])", "y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx +", "gdp_2020h1: print(f\"{loc}: missing GDP data in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob,", "/ population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y])", "$\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First", "xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for", "in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day,", "smooth joins nongreen_stretches += [ [index] ] elif last_index_is_green == False: nongreen_stretches[-1] +=", "fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in [y1,", "# PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True,", "points = np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) #", "0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '}", "0, 0) lwidths = [z] for segi, seg in enumerate(segments): seg = seg.T", "3000, 10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 =", "1, 2) segments = 
np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit)", "row_i = ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0))", "(0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina':", "optima and sd[1] > cutoff): front += [sd[2]] cutoff = sd[1] return front", "MIT Last changes: November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from operator", "fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()):", "0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40),", "plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width']", "thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green = None for index, value in", "if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1", "~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories,", "def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = [] for i in range(n_fronts): fronts_locations", "= color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha,", "in enumerate(z): for y in [y1, y2]: extra_shift = -0.08 if v in", "if country in colors.keys(): return colors[country] else: return dull_color def correlations(values, weights): rho", "'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = 
[i.replace(day=1) for i in ii] return", "location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark': (940,", "[] points_eur, weights_eur = [], [] for loc in locations: if population(loc) <", "{cc} that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif", "/ 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations", "loc not in ed_locations: print(f\"{loc} in figure {fig_name}: deaths will be used in", "markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1],", "'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color =", "pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1", "show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = [] for i", "in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <= last_avail_day", "deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval == 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0)", "fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M", "color='#666666') # DATA df = trajs_trimmed[country].copy() # DATA: begin each trajectory since 100", "'bottom': 
spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if", "fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94),", "GDP data in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories,", "{fn}.\") return fig if __name__ == '__main__': with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories,", "0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80),", "[(*im_de, loc) for loc, im_de in finals.items() if loc not in fronts_locations and", "cleanup=True, verbose=False): assert len(cols) == 2 col1, col2 = cols days_of_last_available_data = set()", "ax_leg.set_ylim((0, 1)) # tracer line for y in [y1, y2, y3]: xx =", "= sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols = 6 nrows = (len(locations))//ncols", "loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in", "= correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-',", "Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1,", "in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi),", "'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0,", "skipped in figure {fig_name}\") continue if loc not in ed_locations: print(f\"{loc} in figure", "= 
np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']]", "0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65),", "= df.index df.index = [i.replace(day=1) for i in ii] return df elif interval", "ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half of 2020 / M}}$') ax.set_xlim((-2, 14))", "fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s}", "0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84),", "if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False) def", "= ['mobility', 'new_deaths'] # set up the figure if show_corr_history: fig, axes =", "item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories,", "thickness line (BEGIN) lwidths = [1*np.log(1 + np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1,", "in mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot be pd.NA because used in", "df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df) green_indices = df[df_freq <", "df.values, header='lockdown,gdp_loss', delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn)", "else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def", "min(days_of_last_available_data) if force_end is None: if verbose: print(f\"Last shared available day ({' &", "if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 
'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else:", "in range(0, 91, 10)] else: front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 +", "Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR)", "cleanup else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country]", "# too-many-lines \"\"\" This code features the article \"Pareto-based evaluation of national responses", "loc in STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths", "return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30, 100, 300, 1000,", "else: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d'))", "in [y1, y2, y3]: xx = [float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1])", "l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) ==", "seg in enumerate(segments): seg = seg.T el = min(1, 0.075 + ((lwidths[0][segi] -", "csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image as PDF fig.tight_layout()", "== 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([])", "elif index in nongreen_indices: if last_index_is_green is None or last_index_is_green == True: if", "in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def", "( 52, 0.59 ), 'Portugal': ( 0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland':", "ax.legend(loc='upper center', 
bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for", "final_day <= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day,", "# PLOT: deaths in low-mortality locations if kind == 'deaths' and country in", "xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15,", "df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is None:", "np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for", "* register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True", "= 100 above_min_cumul_indices = df['total_cases'] >= min_cumul # cases even if kind ==", "xycoords='axes fraction', clip_on=False) x, y1, y2 = [], [], [] for i in", "5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept,", "in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def", "y in [y1, y2, y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5,", "import matplotlib.dates as dts import matplotlib.ticker as tckr import matplotlib.patheffects as pthff from", "str(thr_weekly_cases_per_1M) + '} ' r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5,", "[y1, y2]: extra_shift = -0.08 if v in [100, 300, 1000] else 0", "for _ in fronts for __ in _] finals_remaining = [(*im_de, loc) for", "= {} for loc in locations: im, de = 
extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de", "= [float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-',", "'Slovakia', 'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations,", "0) lwidths = [z] for segi, seg in enumerate(segments): seg = seg.T if", "optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left',", "== 'daily': immobi = immobi.cumsum() deaths = deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={", "'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois':", "& ~df[col2].isnull() ] if cleanup else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country,", "def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items(): if loc in", "features the article \"Pareto-based evaluation of national responses to COVID-19 pandemic shows that", "'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece':", "} if country in colors.keys(): return colors[country] else: return dull_color def correlations(values, weights):", "nrows = (len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for", "'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK')", "xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if panel_letter is", "in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi),", "index, value in df.iterrows(): if index in green_indices: if 
last_index_is_green is None or", "co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) #", "ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7,", "z = 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z", "df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose:", "marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33,", "show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations,", "fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def", "ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] *", "deaths\") if loc not in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because of", "green_stretches[-1] += [index] last_index_is_green = True elif index in nongreen_indices: if last_index_is_green is", "wrho = correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes", "return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor", "and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\",", "ncols = 6 nrows = (len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols,", "'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20,", "- 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if panel_letter is not", "dill.load(f) print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day],", "= extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if", "skipped and country in skipped: continue df = trajs[country] df_sel = df[ ~df[col1].isnull()", "linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if", "return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax =", "file {fn}.\") return fig def 
put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30, 100,", "green_stretches[-1] += [index] # extra point for smooth joins nongreen_stretches += [ [index]", "ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths': #", "None: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if", "export coordinates if panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths',", "not shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax,", "is None or last_index_is_green == True: if green_stretches: green_stretches[-1] += [index] # extra", "i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax", "\" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn =", "locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue", "' + str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes", "color=color) for i in range(1, len(im)): m, ms = [('s', 1.7), ('D', 1.55),", "0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0,", "skipped=None, cleanup=True, verbose=False): assert len(cols) == 2 col1, col2 = cols days_of_last_available_data =", "0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10,", "csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF fig.tight_layout()", "'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 
0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile':", "df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else df return day_of_last_available_data, edited_trajs def", "is None or last_index_is_green == False: green_stretches += [ [index] ] elif last_index_is_green", "= trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval == 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths", "left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related", "deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def", "+ lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi],", "spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom'", "data in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc,", "+ slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = [] for _, row in", "else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs,", "if cleanup else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory =", "rho, wrho = correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97),", "y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\",", "zorder=20, clip_on=False) # variable thickness line (END) points = np.array([x, 
y2]).T.reshape(-1, 1, 2)", "population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols =", "fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97),", "ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False,", "'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left'", "None for index, value in df.iterrows(): if index in green_indices: if last_index_is_green is", "= np.array(list(finals.values())) weights = np.array([population(loc) for loc in finals.keys()]) rho, wrho = correlations(values,", "0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if", "return (rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items():", "cutoff = sd[1] return front def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False,", "if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data", "['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if loc in excess_deaths else", "= (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False)", "plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85,", "'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 
'Mexico':", "# annotation: correlation coefficients values = np.array(list(finals.values())) weights = np.array([population(loc) for loc in", "= df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y,", "pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)),", "missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be pd.NA because used in", "elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left')", "values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0):", "ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5): lightness =", "if loc not in fronts_locations and loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining,", "df.index df.index = [i.replace(day=1) for i in ii] return df elif interval ==", "color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\",", "='D') ) # <-- THIS #de = de.set_index( de.index.shift(-days_back, freq ='D') ) #", "= plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4')", "in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in enumerate(fronts): color", "dots + thin black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o',", "+ 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0, 91, 10)] else: front_coords =", "= [(*im_de, loc) for loc, im_de in finals.items() if loc not in fronts_locations", "cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) == 2 col1, col2 = cols", "in label_shifts else label_shifts[location][0]), y**0.9999 * (1 if location not in label_shifts else", "100 above_min_cumul_indices = df['total_cases'] >= min_cumul # cases even if kind == 'deaths'", "heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\",", "set up the figure if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for", "in figure {fig_name}: deaths will be used in place of excess deaths\") if", "0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45, 0.75,", "y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: 
points_eur.append([x, y]) weights_eur.append(population(loc))", "finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2,", "nongreen_stretches += [ [index] ] elif last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green", "= pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1')", "s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000',", "length=2., labelsize=7) ax.tick_params(which='minor', length=1.) def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale)", "= trajs_trimmed[country].copy() # DATA: begin each trajectory since 100 cumulative cases min_cumul =", "== 'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc)", "min_cumul # cases even if kind == 'deaths' df = df[above_min_cumul_indices] # DATA:", "+ ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi],", "data on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v", "fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations if kind == 'deaths' and", "f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = [i.replace(day=1) for i in ii]", "= [], [] points_eur, weights_eur = [], [] for loc in locations: if", "location not in label_shifts else label_shifts[location][0]), y**0.9999 * (1 if location not in", "for vi, v in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5,", 
"scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = [] for i in range(n_fronts):", "2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label,", "line for y in [y1, y2, y3]: xx = [float(x[0]) + 0.125] +", "title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor),", "front += [sd[2]] cutoff = sd[1] return front def put_final_dot(ax, location, x, y,", "# cannot be pd.NA because used in mpl.plot df.at[missing_day, Rt_col] = np.nan #", "[index] last_index_is_green = False stretches = [( g, SNAIL_GREEN ) for g in", "import register_matplotlib_converters import locale import dill import gzip from shared import * register_matplotlib_converters()", "lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1,", "0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), } if country in colors.keys(): return colors[country]", "for loc, spine in ax.spines.items(): if loc in spines: if loc == 'left':", "GDP data\") continue is_in_Europe = not loc in STATE_TO_ABBREV and not loc in", "[index] last_index_is_green = True elif index in nongreen_indices: if last_index_is_green is None or", "plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting", "not locate_set: print('Warning: US English locale could not be set. 
Check tick labels", "clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if italic: loc = r'\\textit{'", "if not loc in gdp_2020h1: print(f\"{loc}: missing GDP data in figure {fig_name}\") continue", "clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when", "min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index", "{:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33,", "= plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the", "= [1, 3, 10, 30, 100, 300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37", "for front_i, front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i -", "[intercept, intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = [] for _,", "0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67", "color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg):", "of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err", "xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} '", "= [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + 
i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180", "of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for", "0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75,", "optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8) else: if optimal:", "# PLOT: dots + thin black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y,", "ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} ' r'new cases /week", "segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit", "shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs", "xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only:", "key=by_first_day) # PLOT: variable thickness line for stretch, color in stretches: x, y", "panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax =", "csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] # set", "zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country,", "fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations): ax", "markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, 
n_fronts=3+2,", "label_shifts = { 'Denmark': (940, 1.0 ), 'Norway': ( 20, 0.88 ), 'South", "- 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable", "ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy() #", "i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0, 91,", "xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16", "10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0", "len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line for y in [y1, y2, y3]:", "green_stretches, nongreen_stretches = [], [] last_index_is_green = None for index, value in df.iterrows():", "in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT: variable", "figure {fig_name}\") continue if loc not in ed_locations: print(f\"{loc} in figure {fig_name}: deaths", "100, 300, 1000, 3000, 10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 =", "str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5,", "z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0,", "l=lightness, s=None) def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima) #", "fday <= last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts", "solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness line (BEGIN) lwidths = [0.7 *", "linewidth=1.1 if optimal else 0.8) else: if optimal: 
front_coords = [[front_coords[0][0] + 0.707*180", "points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit =", "linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction',", "numpy as np import scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt", "enumerate(segments): seg = seg.T el = min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co", "y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur,", "(0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California':", "linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8,", "front = [ sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd in sorted_data[1:]: if", "= -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig,", "return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1),", "zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) #", "ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major',", "markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5,", "0)) ax.set_xticks((-100, 0)) 
#ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0,", "True elif index in nongreen_indices: if last_index_is_green is None or last_index_is_green == True:", "0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness", "seg.T if kind == 'cases': el = 0.15 + lwidths[0][segi] / 8 else:", "labels in generated figures.') # -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5", "xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" +", "0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea':", "facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False,", "labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return", "0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), } if country in", "missing GDP data in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _ =", "SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15,", "(0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80,", "= [ sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd in sorted_data[1:]: if (optima", "trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) +", "as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig", "(Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" +", "color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes", "return front def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts", "ms = [('s', 1.7), ('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 - i],", "ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if", "partition trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert", "ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line (BEGIN) lwidths = [1*np.log(1 +", "0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': 
(0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88,", "= 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') #", "make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}',", "ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\")", "MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue if loc not in", "(0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2),", "+ 0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy,", "else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True)", "xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values = np.array(list(finals.values())) weights = np.array([population(loc)", "'cases': assert last_day in trajs[cc].index, \\ print(f\"Day {last_day} not available for {cc} that", "'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru':", "= row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49,", "figure {fig_name} because of missing GDP data\") continue is_in_Europe = not loc in", "return fig if __name__ == '__main__': with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations,", "i in range(1, len(im)): m, ms = [('s', 1.7), ('D', 1.55), ('p', 2.2)][i", "locate_set = True except: locale.setlocale(locale.LC_TIME, 
'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US", "r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax,", "['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i = ci//ncols if row_i == nrows-1:", "temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df)", "ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$')", "else: marker_size = 6 ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1,", "plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of", "Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands',", "<= last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and", "= immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index = [i.replace(day=1) for", "gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red +", "2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights = [],", "extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) 
weights.append(population(loc)) points = np.array(points) rho, wrho", "(0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15),", "[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1)", "['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i =", "locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days,", "assert len(cols) == 2 col1, col2 = cols days_of_last_available_data = set() for country", "color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line (END) points =", "df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1)", "0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05),", "adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1, y2", "for i in range(0, 91, 10)] else: front_coords = [[front_coords[0][0] - 0.707*180 +", "= color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0,", "fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 = plot_gdp_drop_and_excess_deaths(trajectories, locations, jul01,", "final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc", "ppl = population(country) if interval == 'monthly': immobi = 
immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0)", "df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--',", "= plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout()", "[__ for _ in fronts for __ in _] finals_remaining = [(*im_de, loc)", "$\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) #", "40, 1.014), 'Michigan': (800, 1.018), 'Florida': ( 0, 0.987), 'Illinois': ( 90, 1.016),", "green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs): return cs[0][0]", "correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted',", "item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else:", "else: el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0],", "finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last day ax.annotate(str('Date:' if", "plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations,", "np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export", "locations, [jul01, final_day], show_fronts=True, 
show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day,", "loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue if loc not in ed_locations: print(f\"{loc}", "), 'South Korea': ( 52, 0.59 ), 'Portugal': ( 0, 0.97 ), 'Bulgaria':", "3 light_color = color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1,", "fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes fraction',", "== 'cases': # ==--- # PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75,", "ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line", "locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind == 'cases': assert last_day", "if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i = ci//ncols if", "= 0.15 + lwidths[0][segi] / 8 else: el = 0.10 + lwidths[0][segi] /", "facecolor = '#f8f6f4' ncols = 6 nrows = (len(locations))//ncols + 1 fig, _", "z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z),", "( 0, 0.987), 'Illinois': ( 90, 1.016), 'North Carolina': (-10, 0.97 ), 'Pennsylvania':", "color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\",", "df.at[missing_day, Rt_col] = np.nan # cannot be pd.NA because used in mpl.plot df.sort_index(inplace=True)", "300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable", "['left', 
'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half of 2020')", "clip_on=True, zorder=10) # DATA: partition trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7,", "'__main__': with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 =", "in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day]", "0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 -", "va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"]", "ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue", "print(f\"{loc}: missing GDP data in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _", "'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90, 0.70, 0.00),", "elif kind == 'deaths': # ==--- days_back = 14 x, y = df[[mob_col,", "== len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches,", "Lipniacki (Scientific Reports, 2021). 
License: MIT Last changes: November 09, 2020 \"\"\" #", "jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <= last_avail_day panel_letter = chr(ord('A') + i)", "' r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when", "0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands':", "figures.') # -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5", "is_in_Europe = not loc in STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan', 'Japan',", "bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in", "0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round',", "skipped: continue df = trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day", "for segi, seg in enumerate(segments): seg = seg.T el = min(1, 0.075 +", "[index] ] elif last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green = False stretches", "np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z = 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z),", "va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week /M', xy=(0.5, 0.22), xycoords='axes fraction',", "+ f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\"", "def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30, 100, 300, 1000, 3000, 10000]", "& '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\",", "nongreen_stretches[-1] += [index] 
last_index_is_green = False stretches = [( g, SNAIL_GREEN ) for", "import numpy as np import scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as", "= set() for country in locations: if skipped and country in skipped: continue", "[y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color,", "in locations: if not loc in gdp_2020h1: print(f\"{loc}: missing GDP data in figure", "x, y1, y2 = [], [], [] for i in range(9): points, weights", "ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory", "fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction',", "plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5", "0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30,", "Check tick labels in generated figures.') # -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth']", "= correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ =", "l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots +", "in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) 
ax.tick_params(which='minor', length=1.) def", "ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0']", "bottom_shift)) else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in", "'Canada', 'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR", "ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction',", "trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes", "clip_on=False) # variable thickness line (END) points = np.array([x, y2]).T.reshape(-1, 1, 2) segments", "/ 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round',", "color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes", "x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0", "continue gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob,", "in nongreen_indices: if last_index_is_green is None or last_index_is_green == True: if green_stretches: green_stretches[-1]", "xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death", "into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) ==", "months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") 
ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item", "stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df) green_indices", "tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA:", "seg = seg.T el = min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co =", "xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail,", "if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$'))", "location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x", "for i in ii] return df elif interval == 'weekly': immobi = immobi.resample('W').sum().cumsum()", "ax.annotate(loc, xycoords='data', xy=(x + 65 - (0 if location not in label_shifts else", "1.06), xycoords='axes fraction', clip_on=False) x, y1, y2 = [], [], [] for i", "trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths': if", "va=\"center\") for vi, v in enumerate(z): for y in [y1, y2]: extra_shift =", "label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur", "last_index_is_green = False stretches = [( g, SNAIL_GREEN ) for g in green_stretches]", "np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z = 0.7*np.log(0 + tests_per_hit) np.place(z,", "per death:', 
xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1}", "print(f\"Saved figure file {fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10,", "{last_day} not available for {cc} that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] /", "immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig, ax =", "09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from operator import itemgetter from multiprocessing", "if label_shifting == 'A' else {} if show_population_halo: marker_size = 3.5 diameter =", "PLOT: variable thickness line for stretch, color in stretches: x, y = df.loc[stretch,", "np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [1*np.log(1 + z)] for", "0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50,", "['left', 'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax in", "and sd[1] > cutoff): front += [sd[2]] cutoff = sd[1] return front def", "seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black", "fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in", "locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US English locale could not be set.", "last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end", "optimal: front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8", 
"joins nongreen_stretches += [ [index] ] elif last_index_is_green == False: nongreen_stretches[-1] += [index]", "(0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00,", "ax.annotate(loc, xycoords='data', xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs,", "0.00), 'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00),", "ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line for y in [y1,", "be pd.NA because used in mpl.plot df.sort_index(inplace=True) if kind == 'cases': # ==---", "cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on", "for g in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def", "(Scientific Reports, 2021). License: MIT Last changes: November 09, 2020 \"\"\" # --------------------------------------------------------------------------------------------------", "'UK'), xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location))", "points, weights = [], [] for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc,", "i in range(9): points, weights = [], [] for loc in locations: im_de", "= x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30)", "'Denmark': (940, 1.0 ), 'Norway': ( 20, 0.88 ), 'South Korea': ( 52,", "90, 1.016), 'North Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825,", "thickness line (BEGIN) lwidths = [0.7 * (0 + np.log(z))] points = np.array([x,", "= ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <-- THIS #de = de.set_index( de.index.shift(-days_back,", "# manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 
'Netherlands', 'Czechia'] # colors: SNAIL_GREEN,", "def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria':", "thickness line (END) points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]],", "= np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments): seg = seg.T color", "0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia':", "0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15,", "z < 0, 0) lwidths = [1*np.log(1 + z)] for segi, seg in", "- 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)]", "else: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2.,", "seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color,", "return colors[country] else: return dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho", "= sorted_data[0][1] for sd in sorted_data[1:]: if (optima and sd[1] < cutoff) or", "None or last_index_is_green == True: if green_stretches: green_stretches[-1] += [index] # extra point", "np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in", "place of excess deaths\") if loc not in gdp_2020h1: print(f\"{loc} skipped in figure", "of national responses to COVID-19 pandemic shows that saving lives and protecting economy", "'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99),", "> cutoff): front += [sd[2]] cutoff = sd[1] return front def put_final_dot(ax, location,", "final_day, 
missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig = trajs.copy()", "day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs = {}", "ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy()", "set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in", "shows that saving lives and protecting economy are non-trade-off objectives\" by Kochanczyk &", "family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------ # manual", "locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set =", "draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last day ax.annotate(str('Date:' if show_corr_history else 'Last", "im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1],", "( 80, 0.92 ), 'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018), 'Florida': (", "np.array(list(finals.values())) weights = np.array([population(loc) for loc in finals.keys()]) rho, wrho = correlations(values, weights)", "THIS #de = de.set_index( de.index.shift(-days_back, freq ='D') ) # <-- not this z", "green_stretches: green_stretches[-1] += [index] # extra point for smooth joins nongreen_stretches += [", "cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT: variable thickness line for stretch, color", "colors[country] else: return 
dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho =", "'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily': immobi = immobi.cumsum()", "100 cumulative cases min_cumul = 100 above_min_cumul_indices = df['total_cases'] >= min_cumul # cases", "kind == 'cases': el = 0.15 + lwidths[0][segi] / 8 else: el =", "+ 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights)", "f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF fig.tight_layout() fn =", "plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3),", "missing GDP data\") continue is_in_Europe = not loc in STATE_TO_ABBREV and not loc", "jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 = plot_gdp_drop_and_excess_deaths(trajectories,", "deaths in the 1\\textsuperscript{st} half of 2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations", "c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day,", "ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval ==", "= min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1],", "+ i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0,", "\\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half of 2020 
/ M}}$') ax.set_xlim((-2,", "yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes", "'==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs = {} assert len(cols) == 2 for", "or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue if loc not in ed_locations:", "optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending front = [", "from pandas.plotting import register_matplotlib_converters import locale import dill import gzip from shared import", "==--- # PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee',", "return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths': if cc", "0, 0) lwidths = [1*np.log(1 + z)] for segi, seg in enumerate(segments): seg", "fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom'])", "+ loc + r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x + 65", "df = df.set_index('location') for loc in locations: if not loc in gdp_2020h1: print(f\"{loc}:", "locations, final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for", "'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United", "in df.iterrows(): location = row.name color = color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']]", "= min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True):", "death:', xy=(0.5, 0.63), 
xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new", "stretches = sorted(stretches, key=by_first_day) # PLOT: variable thickness line for stretch, color in", "0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), } if country", "trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind ==", "xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week /M',", "= np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer", "xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3, 10,", "0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15),", "'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain':", "> 1: ax.plot(*front_coords, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal", "'.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax,", "# export coordinates if panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values,", "statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt import matplotlib.dates as dts import matplotlib.ticker", "+ (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi],", "df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], []", "row.name color = color_of(location) 
mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10)", "loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color", "force_end is None: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'))", "ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week /M', xy=(0.5,", "0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50,", "def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness,", "color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5,", "0.15+0.1* front_coords = np.array([finals[loc] for loc in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords,", "clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases", "variable thickness line (END) points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1],", "zorder=20) # PLOT: dots + thin black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x,", "pandas.plotting import register_matplotlib_converters import locale import dill import gzip from shared import *", "= f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def", "if optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax,", "+ tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) 
np.place(z, z < 0, 0)", "f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4'", "enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05,", "not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in enumerate(fronts):", "i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0, 180+91,", "+= [index] last_index_is_green = False stretches = [( g, SNAIL_GREEN ) for g", "( 0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio':", "else df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country] immobi", "if show_corr_history else 'Last day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01),", "== 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={", "re from operator import itemgetter from multiprocessing import Pool import pandas as pd", "range(n_fronts): fronts_locations = [__ for _ in fronts for __ in _] finals_remaining", "ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) #", "SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day)", "by Kochanczyk & Lipniacki (Scientific Reports, 2021). 
License: MIT Last changes: November 09,", "in enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8)", "(800, 1.018), 'Florida': ( 0, 0.987), 'Illinois': ( 90, 1.016), 'North Carolina': (-10,", "= row.name color = color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color,", "clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False)", "because used in mpl.plot df.sort_index(inplace=True) if kind == 'cases': # ==--- # PLOT:", "y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca =", "np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x,", "values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image", "edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else df return day_of_last_available_data,", "print(f\"Saved figure file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df", "auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia']", "'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia':", "2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca = df[['new_cases14' ]]", "(0.75, 0.50, 0.25), } if country in colors.keys(): return colors[country] else: return dull_color", "export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, 
values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn,", "figure file {fn}.\") return fig if __name__ == '__main__': with gzip.open('processed_data.dill.gz', 'rb') as", "\\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} ' r'new cases /week /M', xy=(0.5, 0.31-0.09),", "{:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False,", "+ [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches =", "'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop +", "fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories,", "= df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else df return day_of_last_available_data, edited_trajs", "/week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in", "if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT:", "2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments): seg =", "10, 30, 100, 300, 1000, 3000, 10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62", "Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit", "PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') #", "z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) 
np.place(z,", "True: green_stretches[-1] += [index] last_index_is_green = True elif index in nongreen_indices: if last_index_is_green", "are optional) finals = {} for loc in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs,", "h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima)", "in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day)", "ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted", "that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind", "+ 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho", "plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00,", "de.set_index( de.index.shift(-days_back, freq ='D') ) # <-- not this z = de.join(ca) z['cases14_per_death14']", "# -------------------------------------------------------------------------------------------------- import re from operator import itemgetter from multiprocessing import Pool import", "fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations =", "low-mortality locations if kind == 'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88),", "points, weights = [], [] points_eur, weights_eur = [], [] for loc in", "= np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur)", "excess_deaths, gdp_2020h1 = dill.load(f) print('Locations 
count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories,", "axis=1) for segi, seg in enumerate(segments): seg = seg.T el = min(1, 0.075", "axes = plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in enumerate(final_day): last_avail_day, trajs =", "fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df", "False: green_stretches += [ [index] ] elif last_index_is_green == True: green_stretches[-1] += [index]", "sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal))) # TMP: was 0.15+0.1* front_coords =", "not in label_shifts else label_shifts[location][0]), y**0.9999 * (1 if location not in label_shifts", "in _] finals_remaining = [(*im_de, loc) for loc, im_de in finals.items() if loc", "= seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-',", "Korea'] deaths = max(excess_deaths[loc] if loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y", "trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175,", "loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in spines: ax.yaxis.set_ticks_position('left') else:", "xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho,", "delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure", "color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes", "else label_shifts[location][1])), 
color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y +", "0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for i in range(1, len(im)): m,", "(0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45,", "ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness line", "linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory into temporally-ordered stretches", "ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots + thin", "sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending front = [ sorted_data[0][2] ] cutoff", "(0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00),", "+ str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction',", "in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because of missing GDP data\") continue", "] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if", "fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94),", "kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b,", "== 'A' else {} if show_population_halo: marker_size = 3.5 diameter = np.sqrt(population(location)) *", "el = min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05,", "disable = E1101 # no-member (generated-members) #pylint: disable = C0302 # too-many-lines 
\"\"\"", "segi, seg in enumerate(segments): seg = seg.T el = 0.1 + (lwidths[0][segi] -", "in [y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o',", "wstats import matplotlib.pyplot as plt import matplotlib.dates as dts import matplotlib.ticker as tckr", "= df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1],", "values = np.array(list(finals.values())) weights = np.array([population(loc) for loc in finals.keys()]) rho, wrho =", "immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\",", "if loc == 'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else:", "for i, fday in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert", "figure file {fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3, 10, 30,", "cannot be pd.NA because used in mpl.plot df.sort_index(inplace=True) if kind == 'cases': #", "/ M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights = [], []", "{} assert len(cols) == 2 for country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country]", "= np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0,", "in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal else '--', c=color,", "ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()])", "/week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, 
ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing',", "xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black')", "def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending front", "= { 'Denmark': (940, 1.0 ), 'Norway': ( 20, 0.88 ), 'South Korea':", "pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for", "(0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20,", "y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1,", "# tracer line for y in [y1, y2, y3]: xx = [float(x[0]) +", "sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt',", "'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h", "), 'Portugal': ( 0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92", "print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end", "z = [3, 10, 30, 100, 300, 1000, 3000, 10000] x = np.array(list(range(len(z))))", "pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) #", "ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$',", "'left' in spines: 
ax.yaxis.set_ticks_position('left') else: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([])", "(ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert", "total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot", "va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\",", "US English locale could not be set. Check tick labels in generated figures.')", "in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col,", "marker_size = 3.5 diameter = np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x], [y],", "yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1,", "xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail,", "in place of excess deaths\") if loc not in gdp_2020h1: print(f\"{loc} skipped in", "markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted", "linewidth=1.1 if optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None):", "len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if", "= jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <= last_avail_day fig, 
axes = plt.subplots(ncols=1,", "':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8) else:", "'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark':", "put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h =", "= df['total_cases'] >= min_cumul # cases even if kind == 'deaths' df =", "trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else df return", "+ 0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1,", "fronts.append(front) for front_i, front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i", "last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is None: if verbose: print(f\"Last shared available", "visual discontinuities for missing_day in missing_days[country]: if df.index[0] <= missing_day and missing_day <=", "_ in fronts for __ in _] finals_remaining = [(*im_de, loc) for loc,", "== True: if green_stretches: green_stretches[-1] += [index] # extra point for smooth joins", "* scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data = sorted(data,", "'Switzerland': ( 80, 0.92 ), 'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018), 'Florida':", "c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8) else: if optimal: front_coords = [[front_coords[0][0]", "plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width']", "== 'deaths': # ==--- days_back = 14 x, y = df[[mob_col, Rt_col]].values.T points", "np.isinf(z), 1000) np.place(z, 
z < 0, 0) lwidths = [z] for segi, seg", "cutoff = sorted_data[0][1] for sd in sorted_data[1:]: if (optima and sd[1] < cutoff)", "if v in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift +", "3, 10, 30, 100, 300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1))", "interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country)", "if interval == 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df", "DATA df = trajs_trimmed[country].copy() # DATA: begin each trajectory since 100 cumulative cases", "for y in [y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy,", "gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in", "14 x, y = df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments", "y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x,", "alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\")", "= population(country) if interval == 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) /", "r'\\textit{' + loc + r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x +", "and not loc in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if", "y = df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments =", "fig if __name__ == '__main__': with 
gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day,", "['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)')", "ax.plot([0, 5000], [intercept, intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights = []", "zorder=10) # DATA: partition trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\", "(825, 0.991) } if label_shifting == 'A' else {} if show_population_halo: marker_size =", "(0.45, 0.75, 1.00), 'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15,", "clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax)", "plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations,", "0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35,", "y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line (BEGIN) lwidths", "linewidth=0.8, alpha=alpha, color=color) for i in range(1, len(im)): m, ms = [('s', 1.7),", "np.array([finals[loc] for loc in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal", "in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal))) # TMP:", "+ i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter", "= pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1", "y = -gdp_2020h1[loc], 
np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False,", "loc + r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x + 65 -", "np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca", "from colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale import dill import", "fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig if __name__ ==", "0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co,", "'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00),", "0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92 ), 'Ohio': (", "population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day,", "- 1*optimal))) # TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for loc in front]).T", "cols days_of_last_available_data = set() for country in locations: if skipped and country in", "be used in place of excess deaths\") if loc not in gdp_2020h1: print(f\"{loc}", "0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65,", "== 2 for country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull()", "xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values = np.array(list(finals.values())) weights", "[i.replace(day=1) for i in ii] return df elif interval == 'weekly': immobi =", "ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06),", "italic=False): label_shifts = { 'Denmark': (940, 1.0 ), 'Norway': ( 20, 0.88 
),", "PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10)", "va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3, 10, 30, 100, 300]", "5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020", "in skipped: continue df = trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ]", "count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False,", "(0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65,", "5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\"", "+ 0.04), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False):", "immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\",", "in ii] return df elif interval == 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths =", "'cases': # ==--- # PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1,", "= 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] =", "i, fday in enumerate(final_day): last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday", "+ r'} ' r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', 
fontsize=6.5, ha=\"center\",", "sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots", "1.0 ), 'Norway': ( 20, 0.88 ), 'South Korea': ( 52, 0.59 ),", "l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location), l=0.3),", "fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 =", "print(f\"{loc} skipped in figure {fig_name}\") continue if loc not in ed_locations: print(f\"{loc} in", "= { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia':", "points[1:]], axis=1) for segi, seg in enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN,", "= sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1,", "length=1.) 
def darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None,", "discontinuities for missing_day in missing_days[country]: if df.index[0] <= missing_day and missing_day <= FINAL_DAY:", "elif last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green = True elif index in", "sorted_data[0][1] for sd in sorted_data[1:]: if (optima and sd[1] < cutoff) or (not", "and sd[1] < cutoff) or (not optima and sd[1] > cutoff): front +=", "- 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt',", "death /week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v", "skipped=low_mortality_locations) def by_per_capita(cc): if kind == 'cases': assert last_day in trajs[cc].index, \\ print(f\"Day", "if green_stretches: green_stretches[-1] += [index] # extra point for smooth joins nongreen_stretches +=", "= C, R #pylint: disable = E1101 # no-member (generated-members) #pylint: disable =", "= chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{'", "i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name)", "df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <-- THIS #de", "ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax in fig.axes: if ax.is_last_row()", "final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1", "<= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be pd.NA", "freq ='D') ) # <-- THIS #de = de.set_index( 
de.index.shift(-days_back, freq ='D') )", "adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i", "label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9))", "rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's", "i)/360*2*3.14159)] for i in range(0, 91, 10)] else: front_coords = [[front_coords[0][0] - 0.707*180", "= f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig if __name__ == '__main__':", "== nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT:", "country, interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl =", "tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE", "xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1, y2 = [], [], [] for", "as dts import matplotlib.ticker as tckr import matplotlib.patheffects as pthff from colorsys import", "markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if italic: loc = r'\\textit{' + loc", "_] finals_remaining = [(*im_de, loc) for loc, im_de in finals.items() if loc not", "+= [index] # extra point for smooth joins nongreen_stretches += [ [index] ]", "def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax + sep, sep))", "= np.nan # cannot be pd.NA because used in mpl.plot df.at[missing_day, Rt_col] =", "x.append(im_de.name) 
y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333',", "optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last day ax.annotate(str('Date:' if show_corr_history else", "else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'],", "left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1, y2 = [], [],", "protecting economy are non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific Reports, 2021). License:", "markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0,", "in sorted_data[1:]: if (optima and sd[1] < cutoff) or (not optima and sd[1]", "cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day,", "'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col,", ") for g in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches]", "sorted(stretches, key=by_first_day) # PLOT: variable thickness line for stretch, color in stretches: x,", "0.60), 'Peru': (0.75, 0.50, 0.25), } if country in colors.keys(): return colors[country] else:", "0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'}", "optimal=False) # annotation: last day ax.annotate(str('Date:' if show_corr_history else 'Last day:') + \\", "'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95), 
'New York': (0.60, 0.30,", "loc in spines: if loc == 'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom':", "colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country,", "correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho)", "xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\",", "np.nan # cannot be pd.NA because used in mpl.plot df.at[missing_day, Rt_col] = np.nan", "last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <= last_avail_day fig, axes", "_{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z =", "per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' +", "0.991) } if label_shifting == 'A' else {} if show_population_halo: marker_size = 3.5", "- i], de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color,", "r'} ' r'new cases /week /M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\")", "= correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction',", "for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X',", "darken(color, scale=0.5): lightness = min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness, 
s=None)", "fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values = np.array(list(finals.values())) weights = np.array([population(loc) for", "for i in range(n_fronts): fronts_locations = [__ for _ in fronts for __", "the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept,", "for loc in locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in", "plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01,", "adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items(): if loc in spines:", "'POSIX') if not locate_set: print('Warning: US English locale could not be set. Check", "[0.7 * (0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2) segments =", "ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week", "plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn", "= darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for i", "could not be set. 
Check tick labels in generated figures.') # -- Shared", "chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' +", "* (0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1],", "/ pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5,", "for i in range(1, len(im)): m, ms = [('s', 1.7), ('D', 1.55), ('p',", "{final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved", "i in range(0, 91, 10)] else: front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180", "mobility reduction in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st}", "'#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5),", "population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc))", "verbose=False): assert len(cols) == 2 col1, col2 = cols days_of_last_available_data = set() for", "= scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights", "put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark':", "y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness line (BEGIN)", "va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", 
va=\"center\") for", "de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation:", "color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color,", "for loc in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal else", "0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\",", "day_of_last_available_data = min(days_of_last_available_data) if force_end is None: if verbose: print(f\"Last shared available day", "export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b", "+ 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy,", "for loc in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de)", "+= [ [index] ] elif last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green =", "= de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0)", "'Norway': ( 20, 0.88 ), 'South Korea': ( 52, 0.59 ), 'Portugal': (", "changes: November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from operator import itemgetter", "day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths", "0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia':", "X-axis row_i = ci//ncols if row_i == 
nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100,", "Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR)", "days_back = 14 x, y = df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1,", "trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if", "linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y],", "8 else: el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el)", "spines, left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items(): if loc in spines: if", "matplotlib.pyplot as plt import matplotlib.dates as dts import matplotlib.ticker as tckr import matplotlib.patheffects", "usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT", "xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values,", "was 0.15+0.1* front_coords = np.array([finals[loc] for loc in front]).T if len(front_coords.T) > 1:", "0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\",", "else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax in fig.axes: if ax.is_last_row() and", "= \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's", "[ [index] ] elif last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green = False", "'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 
'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65,", "alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y], '-.', marker='8' if", "= [], [] for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 -", "= df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green = None for", "day_of_last_available_data = force_end edited_trajs = {} assert len(cols) == 2 for country in", "ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100)", "pd.NA because used in mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot be pd.NA", "import matplotlib.patheffects as pthff from colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters import", "return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax +", "(-10, 0.97 ), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991) } if label_shifting", "location = row.name color = color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop],", "ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black',", "loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left',", "fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the", "show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') 
fig4 =", "(year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T)", "fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\",", "] elif last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green = False stretches =", "ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1])", "force_end edited_trajs = {} assert len(cols) == 2 for country in locations: df", "yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1,", "# ==--- # PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round',", "in STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths =", "'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan':", "plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35),", "else: return dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values,", "ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day,", "seg = seg.T if kind == 'cases': el = 0.15 + lwidths[0][segi] /", "0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week", "else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <= last_avail_day fig,", 
"f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1,", "Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de", "1: ax.plot(*front_coords, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else", "xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',')", "y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88),", "'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning:", "else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i = ci//ncols if row_i ==", "fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col()", "1\\textsuperscript{st} half of 2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points,", "= sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) #", "linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of", "np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01,", "0.40, 0.00), 'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50,", 
"'--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations,", "zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\",", "trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\",", "for loc in locations: if not loc in gdp_2020h1: print(f\"{loc}: missing GDP data", "str(t**2) + '}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail,", "+ (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False)", "loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points) rho, wrho =", "0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days,", "locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set:", "plot \"flares\" (tails are optional) finals = {} for loc in locations: im,", "+ thin black for y in [y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx", "lwidths = [z] for segi, seg in enumerate(segments): seg = seg.T if kind", "0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio':", "[] for _, row in df.iterrows(): location = row.name color = color_of(location) mob_red,", "+ 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i", "front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0, 91, 10)]", "ax_leg.set_ylim((0, 1)) # variable thickness line 
(BEGIN) lwidths = [1*np.log(1 + np.array(z))] points", "ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days", "in low-mortality locations if kind == 'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5,", "else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8) else: if optimal: front_coords", "14)) slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept +", "l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths':", "(BEGIN) lwidths = [0.7 * (0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1,", "[y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black',", "0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00),", "if force_end is None: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\",", "as pthff from colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale import", "is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility',", "y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations,", "ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are", "ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind),", "np.concatenate([points[:-1], points[1:]], 
axis=1) for segi, seg in enumerate(segments): seg = seg.T el =", "linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths': # ==--- days_back = 14", "de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z,", "trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval == 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths =", "segi, seg in enumerate(segments): seg = seg.T el = min(1, 0.075 + ((lwidths[0][segi]", "axis=1) for segi, seg in enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15", "= pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in locations: if not", "Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export", "= cols days_of_last_available_data = set() for country in locations: if skipped and country", "[('s', 1.7), ('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 - i], de[-1 -", "verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is None: if verbose: print(f\"Last", "== 2 col1, col2 = cols days_of_last_available_data = set() for country in locations:", "y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of", "xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in", "correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \"", "sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$' for t in", "fraction', fontsize=6.5, ha=\"center\", 
va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} ' r'new", "$\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00)", "TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for loc in front]).T if len(front_coords.T) >", "dts import matplotlib.ticker as tckr import matplotlib.patheffects as pthff from colorsys import rgb_to_hls", "fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97),", "seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths': # ==---", "last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_')", "SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)):", "+ i)/360*2*3.14159), front_coords[1][0] - 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0,", "\"flares\" (tails are optional) finals = {} for loc in locations: im, de", "optimal): fronts = [] for i in range(n_fronts): fronts_locations = [__ for _", "el = 0.1 + (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1],", "= [], [], [] for i in range(9): points, weights = [], []", "ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5,", "'-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line (END) points", "seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots + thin black", "for smooth joins nongreen_stretches += [ [index] ] elif last_index_is_green == False: 
nongreen_stretches[-1]", "ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5,", "wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine", "make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are optional) finals = {} for loc in", "df.sort_index(inplace=True) if kind == 'cases': # ==--- # PLOT: pink tracer line ax.plot(*df[[mob_col,", "colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00),", "'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia':", "ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()])", "else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\",", "missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be pd.NA because", "force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) == 2 col1, col2 = cols days_of_last_available_data", "- i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points) rho, wrho = correlations(points, weights)", "= \" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export", "country in skipped: continue df = trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull()", "= 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-',", "= \" + f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', 
color=ANNOT_COLOR) # export coordinates csv_fn", "set() for country in locations: if skipped and country in skipped: continue df", "0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop =", "0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes", "markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o',", "y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000',", "show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in enumerate(final_day): last_avail_day,", "range(0, 91, 10)] else: front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159),", "'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777'", "elif kind == 'deaths': if cc in low_mortality_locations: return trajs[cc].loc[last_day, f\"total_{kind}\"] / 1e9", "show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark': (940, 1.0 ), 'Norway': ( 20,", "ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5,", "zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per", "0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60),", "['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1, y2 =", "national responses to COVID-19 pandemic shows that saving lives and protecting economy are", "p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + 
slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa',", "# <-- THIS #de = de.set_index( de.index.shift(-days_back, freq ='D') ) # <-- not", "facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False,", "label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols,", "20, 0.88 ), 'South Korea': ( 52, 0.59 ), 'Portugal': ( 0, 0.97", "color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots + thin black x, y", "green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches =", "]] ca = ca.set_index( ca.index.shift(+days_back, freq ='D') ) # <-- THIS #de =", "ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index =", "value in df.iterrows(): if index in green_indices: if last_index_is_green is None or last_index_is_green", "objectives\" by Kochanczyk & Lipniacki (Scientific Reports, 2021). 
License: MIT Last changes: November", "markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if italic:", "#ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1,", "plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95,", "im_de in finals.items() if loc not in fronts_locations and loc not in OUT_OF_FRONT]", "(0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00,", "= 14 x, y = df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2)", "min(1, rgb_to_hls(*color[0:3])[1] * scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data", "in enumerate(segments): seg = seg.T el = 0.1 + (lwidths[0][segi] - 0.)/14 color", "lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1,", "= 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True)", "Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991) } if", "-gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe)", "trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <= last_avail_day fig, axes =", "i in range(n_fronts): fronts_locations = [__ for _ in fronts for __ in", "= x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, 
clip_on=False, zorder=30)", "clip_on=False) # dots + thin black for y in [y1, y2, y3]: xx,", "label_shifts[location][0]), y**0.9999 * (1 if location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3),", "'Italy': plt.cm.tab10(2), 'Netherlands': (0.88, 0.50, 0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal':", "+ r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2])", "that saving lives and protecting economy are non-trade-off objectives\" by Kochanczyk & Lipniacki", "stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2)", "ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations if kind == 'deaths' and country", "0.8) else: if optimal: front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159),", "not in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because of missing GDP data\")", "return df elif interval == 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() /", "pd import seaborn as sns import numpy as np import scipy.stats import statsmodels.stats.weightstats", "tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z =", "is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = { 'Denmark': (940, 1.0 ), 'Norway': (", "point for smooth joins nongreen_stretches += [ [index] ] elif last_index_is_green == False:", "Rt_col] = np.nan # cannot be pd.NA because used in mpl.plot df.sort_index(inplace=True) if", "xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=1.33, foreground=facecolor), pthff.Normal()]) adjust_spines(ax,", "plt.rcParams['ytick.major.width'] 
= 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad']", "'#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria':", "= seg.T el = 0.1 + (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el)", "#de = de.set_index( de.index.shift(-days_back, freq ='D') ) # <-- not this z =", "# <-- not this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z", "alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United Kingdom', 'UK') if italic: loc =", "solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y in [y2]: xx,", "values_eur = np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur,", "[], [] for loc in locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc}", "adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year", "'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35,", "52, 0.59 ), 'Portugal': ( 0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': (", "'Portugal': ( 0, 0.97 ), 'Bulgaria': (830, 0.994), 'Switzerland': ( 80, 0.92 ),", "'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP", "x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx", "enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data',", "trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, 
**ROLL_OPTS).sum()\\ / population(country) assert len(df_freq)", "np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values", "will be used in place of excess deaths\") if loc not in gdp_2020h1:", "sd[1] > cutoff): front += [sd[2]] cutoff = sd[1] return front def put_final_dot(ax,", "+ '}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history,", "1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels()", "%d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted", "-0.08 if v in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift", "[mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind == 'cases': assert last_day in", "seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi] - 0.)/8) ax_leg.plot(seg[0]+0.05, seg[1],", "fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week /M', xy=(0.5,", "for y in [y1, y2]: extra_shift = -0.08 if v in [100, 300,", "/ 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round',", "tracer line for y in [y1, y2, y3]: xx = [float(x[0]) + 0.125]", "ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20):", "total_per_1M = trajs_orig[country].loc[last_day, 
f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5,", "0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [1*np.log(1 +", "in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc,", "half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half of", "testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z):", "trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:', len(locations)) jul01 =", "y2, y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o',", "'UK') if italic: loc = r'\\textit{' + loc + r'}' if label_shifting ==", "2 col1, col2 = cols days_of_last_available_data = set() for country in locations: if", "sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$' for t in ticks[1:]])", "= [('s', 1.7), ('D', 1.55), ('p', 2.2)][i % 3] ax.plot(im[-1 - i], de[-1", "= df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z = 0.7*np.log(0", "fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig if __name__ == '__main__': with gzip.open('processed_data.dill.gz',", "1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25,", "f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily': immobi = immobi.cumsum() deaths = deaths.cumsum()", "put_legend_deaths(ax_leg): z = [1, 3, 10, 30, 100, 300] x = np.array(list(range(len(z)))) y2", "segi, seg in enumerate(segments): seg = seg.T color = sns.set_hls_values(SNAIL_NONGREEN, l=0.15 + (lwidths[0][segi]", "import dill import gzip from shared import * 
register_matplotlib_converters() locate_set = False try:", "'en_US') locate_set = True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True", "show_corr_history else 'Last day:') + \\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes", "fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction',", "locale could not be set. Check tick labels in generated figures.') # --", "va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5,", ">= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green = None for index, value", "r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + slope*5000], linewidth=0.75, linestyle='--',", "else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) 
def darken(color, scale=0.5): lightness", "label_shifting == 'A' else {} if show_population_halo: marker_size = 3.5 diameter = np.sqrt(population(location))", "/ ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii = df.index df.index", "points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000)", "population(country) assert len(df_freq) == len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq", "\"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False, zorder=100,", "= trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77),", "# plot \"flares\" (tails are optional) finals = {} for loc in locations:", "index in green_indices: if last_index_is_green is None or last_index_is_green == False: green_stretches +=", "1} new death /week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for", "Reports, 2021). 
License: MIT Last changes: November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import", "= f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day,", "alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40)", "import rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale import dill import gzip from", "loc in locations: if not loc in gdp_2020h1: print(f\"{loc}: missing GDP data in", "= False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: try: locale.setlocale(locale.LC_TIME,", "None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] #", "pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 +", "loc in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax,", "= \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's", "finals, n_fronts, optimal): fronts = [] for i in range(n_fronts): fronts_locations = [__", "ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in", "M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights = [], [] points_eur,", "obtain visual discontinuities for missing_day in missing_days[country]: if df.index[0] <= 
missing_day and missing_day", "file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location", "\\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week /M', xy=(0.5, 0.62-0.09),", "= np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d", "clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275,", "extra point for smooth joins nongreen_stretches += [ [index] ] elif last_index_is_green ==", "excess deaths\") if loc not in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because", "fronts = [] for i in range(n_fronts): fronts_locations = [__ for _ in", "set_ticks_lengths(ax) # PLOT: legend for ax in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off()", "missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 =", "ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}", "cannot be pd.NA because used in mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot", "xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0,", "the figure if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for i, fday", "marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) #", "== 'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes 
fraction', fontsize=9, color='#666666',", "functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia'] #", "front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal else '--', c=color, alpha=0.8,", "preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT = ['Greece',", "np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths =", "for segi, seg in enumerate(segments): seg = seg.T if kind == 'cases': el", "if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax in fig.axes: if", "center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label", "linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness line (BEGIN) lwidths", "segments = np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca = df[['new_cases14' ]] ca", "in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for ng in nongreen_stretches] def by_first_day(cs): return", "finals.items() if loc not in fronts_locations and loc not in OUT_OF_FRONT] front =", "show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = [] for", "'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths'])", "= immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths':", "'Georgia': (825, 0.991) } if label_shifting == 'A' else {} if show_population_halo: marker_size", 
"in locations: if skipped and country in skipped: continue df = trajs[country] df_sel", "sep=5): ax.set_ylim((0, ymax)) ticks = list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] +", "np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments): seg = seg.T color =", "thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia',", "0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25),", "locations, cols, force_end=fday) assert fday <= last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i],", "wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2,", "= df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [],", "< thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green", "immobi = immobi.cumsum() deaths = deaths.cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\",", "= sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT:", "optional) finals = {} for loc in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc,", "+ [r'$\\sqrt{' + str(t**2) + '}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations,", "(0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]],", "rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), 
xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's", "0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25, 0.70), 'Hungary':", "file {fn}.\") return fig if __name__ == '__main__': with gzip.open('processed_data.dill.gz', 'rb') as f:", "def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A', italic=False): label_shifts = {", "xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\")", "+ list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee',", "'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), }", "'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany': (0.55, 0.25,", "color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases',", "14)) make_sqrt_deaths_yaxis(ax) ed_locations = excess_deaths.keys() points, weights = [], [] points_eur, weights_eur =", "df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df) green_indices =", "pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading = \"{:d}", "0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co,", "2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from operator import itemgetter from multiprocessing import", "in the 1\\textsuperscript{st} half of 2020 / M}}$') ax.set_xlim((-2, 14)) 
make_sqrt_deaths_yaxis(ax) ed_locations =", "color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols)", "l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots +", "0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95), 'New York': (0.60,", "{ 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4),", "locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn)", "{final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values", "0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for", "30, 100, 300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1))", "dull_color def correlations(values, weights): rho = scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return", "bottom_shift=0): for loc, spine in ax.spines.items(): if loc in spines: if loc ==", "deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are optional) finals = {} for loc", "locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig =", "if (optima and sd[1] < cutoff) or (not optima and sd[1] > cutoff):", "linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy() # DATA: begin each trajectory since", "force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind == 
'cases': assert last_day in trajs[cc].index, \\", "'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set", "#pylint: disable = E1101 # no-member (generated-members) #pylint: disable = C0302 # too-many-lines", "1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0, 91, 10)] else: front_coords = [[front_coords[0][0]", "1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope, intercept, r_value,", "'rb') as f: trajectories, locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:',", "[float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75,", "['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax in fig.axes: if ax.is_last_row() and ax.is_last_col():", "used in mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot be pd.NA because used", "(1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South", "for stretch, color in stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points =", "= f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] # set up", "/ population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols", "# TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for loc in front]).T if len(front_coords.T)", "front_i, front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal)))", "0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55,", "is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, 
color=color_of(location), markerfacecolor=color_of(location)) loc = location.replace('United", "gzip from shared import * register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL,", "np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [1*np.log(1", "elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l,", "gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because of missing GDP data\") continue is_in_Europe", "[y1, y2, y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025,", "fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def", "show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction',", "r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5,", "fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z = [3,", "~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data", "black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True,", "weights = np.array([population(loc) for loc in finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:',", "linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed',", "points[1:]], axis=1) 
for segi, seg in enumerate(segments): seg = seg.T el = min(1,", "nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green = None", "'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland':", "ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <=", "va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert", "np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000),", "in ax.spines.items(): if loc in spines: if loc == 'left': spine.set_position(('outward', left_shift)) elif", "missing_days[country]: if df.index[0] <= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan #", "= [( g, SNAIL_GREEN ) for g in green_stretches] \\ + [(ng, SNAIL_NONGREEN)", "locations, cols, force_end=final_day) assert final_day <= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5))", "loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail: color = color_of(loc) darker_color = darken(color_of(loc)) alpha", "linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5,", "%Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values = np.array(list(finals.values()))", "black for y in [y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5,", "fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, 
[jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 =", "segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in enumerate(segments): seg = seg.T", "x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness", "# DATA: nullify missing days to obtain visual discontinuities for missing_day in missing_days[country]:", "fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at least 1} new death /week /M', xy=(0.5, 0.22),", "'Michigan': (800, 1.018), 'Florida': ( 0, 0.987), 'Illinois': ( 90, 1.016), 'North Carolina':", "to obtain visual discontinuities for missing_day in missing_days[country]: if df.index[0] <= missing_day and", "(lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1,", "<= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be pd.NA because used in mpl.plot", "0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1),", "plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 = plot_gdp_drop_and_excess_deaths(trajectories, locations, jul01, excess_deaths, gdp_2020h1, fig_name='5')", "figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc]", "ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind ==", "len(cols) == 2 col1, col2 = cols days_of_last_available_data = set() for country in", "[] for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0],", "= [immob, 
gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax)", "delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF", "plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font',", "[( g, SNAIL_GREEN ) for g in green_stretches] \\ + [(ng, SNAIL_NONGREEN) for", "show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last day", "0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--',", "df = df[above_min_cumul_indices] # DATA: nullify missing days to obtain visual discontinuities for", "in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if loc in excess_deaths", "on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths':", "except: try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL,", "loc in gdp_2020h1: print(f\"{loc}: missing GDP data in figure {fig_name}\") continue gdp_drop =", "plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80,", "skipped in figure {fig_name} because of missing GDP data\") continue is_in_Europe = not", "b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w, h]) fig.axes[-1].annotate('Last day:'", "segi, seg in enumerate(segments): seg = seg.T if kind == 'cases': el =", "population(country) if interval == 'monthly': immobi = 
immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl", "front_coords = [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 +", "begin each trajectory since 100 cumulative cases min_cumul = 100 above_min_cumul_indices = df['total_cases']", "loc not in fronts_locations and loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal)", "if loc not in ed_locations: print(f\"{loc} in figure {fig_name}: deaths will be used", "y in [y1, y2]: extra_shift = -0.08 if v in [100, 300, 1000]", "in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\",", "low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total", "register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except:", "axes[i].annotate(r'\\large\\textbf{' + panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92,", "xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) +", "import itemgetter from multiprocessing import Pool import pandas as pd import seaborn as", "- 0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01),", "std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5)", "above_min_cumul_indices = df['total_cases'] >= min_cumul # cases even if kind == 'deaths' df", "print(f\"Saved figure file {fn}.\") return fig if __name__ == '__main__': with 
gzip.open('processed_data.dill.gz', 'rb')", "np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] # set up the figure", "ax.set_xticks(()) continue # PLOT: X-axis row_i = ci//ncols if row_i == nrows-1: ax.set_xlabel('Mobility',", "color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory into temporally-ordered stretches df_freq = df[f\"new_{kind}\"].ffill().rolling(window=7,", "\" + f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates", "'.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o',", "0.987), 'Illinois': ( 90, 1.016), 'North Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0,", "({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available day ({' &", "from shared import * register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US')", "ci, country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality", "last_day in trajs[cc].index, \\ print(f\"Day {last_day} not available for {cc} that ends on\",", "ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666') # DATA df = trajs_trimmed[country].copy() # DATA: begin", "(0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35,", "force_end=final_day) assert final_day <= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs,", "if kind == 'cases': # ==--- # PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T,", "gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], 
left_shift=10) ax.set_xlabel(r'GDP", "ticks = list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2)", "alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=True, zorder=10) # DATA: partition trajectory into temporally-ordered stretches df_freq", "else: front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8", "> 10000), 10000) z = 0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z),", "f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \"", "and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b -", "'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95, 0.25, 0.75), 'Germany':", "weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted)", "draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals, n_fronts=2, optimal=False) # annotation: last day ax.annotate(str('Date:'", "'}$' for t in ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo,", "[y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30,", "trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day,", "= -gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not", "stretches = [( g, SNAIL_GREEN ) for g in green_stretches] 
\\ + [(ng,", "(0.00, 0.10, 0.65), 'Ecuador': (0.65, 0.65, 0.00), 'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75,", "in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100)", "xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5,", "weights = [], [] points_eur, weights_eur = [], [] for loc in locations:", "(rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine in ax.spines.items(): if", "None or last_index_is_green == False: green_stretches += [ [index] ] elif last_index_is_green ==", "values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] # set up the figure if", "f\"total_{kind}\"] / 1e9 + 1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc)", "0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9, 0.00,", "from operator import itemgetter from multiprocessing import Pool import pandas as pd import", "1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values = np.array(list(finals.values())) weights =", "zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False,", "el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(color, l=el) ax.plot(seg[0], seg[1],", "xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name,", "weights = [] for _, row in df.iterrows(): location = row.name color =", "loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in", "locations: im_de = 
extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points =", "ii] return df elif interval == 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum()", "plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax,", "return df elif interval == 'daily': immobi = immobi.cumsum() deaths = deaths.cumsum() /", "day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available day ({'", "This code features the article \"Pareto-based evaluation of national responses to COVID-19 pandemic", "optima) # x-ascending front = [ sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd", "df return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country] immobi =", "'--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8) else: if optimal: front_coords =", "yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False,", "fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn", "label_shifts else label_shifts[location][0]), y**0.9999 * (1 if location not in label_shifts else label_shifts[location][1])),", "for loc, im_de in finals.items() if loc not in fronts_locations and loc not", "0, 0.987), 'Illinois': ( 90, 1.016), 'North Carolina': (-10, 0.97 ), 'Pennsylvania': (", "extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl", "labelpad=-1) 
ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col():", "(1 if location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc,", "Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations)", "xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if panel_letter is not None: csv_fn =", "'tests_per_hit'].values np.place(tests_per_hit, np.isinf(tests_per_hit) | (tests_per_hit > 10000), 10000) z = 0.7*np.log(0 + tests_per_hit)", "xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country)", "loc, im_de in finals.items() if loc not in fronts_locations and loc not in", "legend for ax in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind ==", "markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha)", "= seg.T if kind == 'cases': el = 0.15 + lwidths[0][segi] / 8", "loc not in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because of missing GDP", "{ 'Denmark': (940, 1.0 ), 'Norway': ( 20, 0.88 ), 'South Korea': (", "immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"})", "= fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 
0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B", "for _, row in df.iterrows(): location = row.name color = color_of(location) mob_red, gdp_drop", "label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y + 0.04),", "np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line (BEGIN) lwidths = [1*np.log(1", "(0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90, 0.70, 0.00), 'Canada': (0.00,", "clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3, 10, 30, 100, 300] x =", "0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] =", "zorder=20, clip_on=False) # dots + thin black for y in [y1, y2, y3]:", "len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True,", "fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y in [y2]:", "+ 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5,", "ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75) for item in (ax.xaxis.label, ax.yaxis.label):", "[sd[2]] cutoff = sd[1] return front def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False,", "enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal))) # TMP: was", "zorder=20) elif kind == 'deaths': # ==--- days_back = 14 x, y =", "panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) 
print(f\"Saved figure file", "0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom': (0.20, 0.00, 0.99), 'Japan': (0.9,", "correlation coefficients values = np.array(list(finals.values())) weights = np.array([population(loc) for loc in finals.keys()]) rho,", "kind == 'cases': assert last_day in trajs[cc].index, \\ print(f\"Day {last_day} not available for", "49, gdp_drop + 0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho =", "linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y in", "clip_on=False, zorder=10) # variable thickness line (BEGIN) lwidths = [0.7 * (0 +", "[], [] for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i]", "= np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) # variable thickness line", "+ i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else", "- 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt',", "zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8, ha=\"center\", va=\"center\") ax_leg.annotate(s=r'when \\textbf{at", "0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33", "[], [], [] for i in range(9): points, weights = [], [] for", "= de.set_index( de.index.shift(-days_back, freq ='D') ) # <-- not this z = de.join(ca)", "), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991) } if label_shifting == 'A'", "matplotlib.dates as dts import matplotlib.ticker as tckr import matplotlib.patheffects as pthff from colorsys", "df elif interval == 
'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl", "pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending front =", "print(f\"Day {last_day} not available for {cc} that ends on\", trajs[cc].tail(1).index) return trajs[cc].loc[last_day, f\"total_{kind}\"]", "= z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0)", "0.028), xycoords='data', color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:',", "= True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US English", "\"\"\" This code features the article \"Pareto-based evaluation of national responses to COVID-19", "axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') # export", "thickness line for stretch, color in stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T", "missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations", "jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True,", "final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts", "'new_deaths'] # set up the figure if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11,", "np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1))", "= 
plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations): ax = fig.axes[ci]", "= np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg", "'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20,", "import pandas as pd import seaborn as sns import numpy as np import", "fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) ax.annotate(s=\"(plot not shown)\", xy=(0.5, 0.67), xycoords='axes", "settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] =", "reduction in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half", "0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95), 'New York':", "facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT:", "finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted)", "ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop + 0.028),", "np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\"", "[] for loc in locations: if population(loc) < MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped", "last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0,", "import gzip from shared 
import * register_matplotlib_converters() locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US')", "economy are non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific Reports, 2021). License: MIT", "deaths = max(excess_deaths[loc] if loc in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y =", "OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in enumerate(fronts): color =", "range(9): points, weights = [], [] for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs,", "ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$')", "0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20),", "df[above_min_cumul_indices] # DATA: nullify missing days to obtain visual discontinuities for missing_day in", ") # <-- not this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14']", "= f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def put_legend_cases(ax_leg,", "v in enumerate(z): for y in [y2]: ax_leg.annotate(s=f\"{v}\", xy=(x[vi] + 0.5, y[vi]+0.05 +", "def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths')", "+ str(thr_weekly_cases_per_1M) + '} ' r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction',", "jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) == 2 col1, col2", "dots + thin black for y in [y1, y2, y3]: xx, yy =", "l, b, w, h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w, h]) fig.axes[-1].annotate('Last", "show_fronts, 
panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$')", "'#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = {", "def jointly_trimmed_trajs(trajs, locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) == 2 col1,", "'bottom' in spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.)", "except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if not locate_set: print('Warning: US English locale could", "ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black', xy=(x[vi]+extra_shift + 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False)", "'A': ax.annotate(loc, xycoords='data', xy=(x + 65 - (0 if location not in label_shifts", "for item in (ax.xaxis.label, ax.yaxis.label): item.set_fontsize(7.00) for label in (ax.get_xticklabels() + ax.get_yticklabels()): label.set_fontsize(6.25)", "i in ii] return df elif interval == 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths", "of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0,", "\" + f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$", "assert fday <= last_avail_day panel_letter = chr(ord('A') + i) make_subplot_(axes[i], trajs, locations, fday,", "for ci, country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in", "+0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line for y in [y1, y2,", "'-', color=co, 
linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for", "ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:',", "0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.16", "is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho, wrho = correlations(values,", "in range(n_fronts): fronts_locations = [__ for _ in fronts for __ in _]", "pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\",", "0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90),", "== 'A': ax.annotate(loc, xycoords='data', xy=(x + 65 - (0 if location not in", "== 'monthly': immobi = immobi.cumsum().groupby(pd.Grouper(freq='M')).nth(0) deaths = deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={", "'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis row_i = ci//ncols", "#pylint: disable = C, R #pylint: disable = E1101 # no-member (generated-members) #pylint:", "pthff from colorsys import rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale import dill", "in generated figures.') # -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width']", "-------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 
0.5", "65 - (0 if location not in label_shifts else label_shifts[location][0]), y**0.9999 * (1", "0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10,", "panel_letter is not None: csv_fn = f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols =", "E1101 # no-member (generated-members) #pylint: disable = C0302 # too-many-lines \"\"\" This code", "spines: ax.xaxis.set_ticks_position('bottom') else: ax.xaxis.set_ticks([]) def set_ticks_lengths(ax): ax.tick_params(which='major', length=2., labelsize=7) ax.tick_params(which='minor', length=1.) def darken(color,", "shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available", "day_of_last_available_data.strftime('%b%d'), '==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data = force_end edited_trajs = {} assert len(cols) == 2", "(0.20, 0.00, 0.99), 'Japan': (0.9, 0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan':", "0.00), 'Norway': plt.cm.tab10(0), 'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8),", "finals = {} for loc in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T", "= \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading, xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\", clip_on=False,", "+ f\"{rho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ =", "np.place(z, z < 0, 0) lwidths = [1*np.log(1 + z)] for segi, seg", "return day_of_last_available_data, edited_trajs def extract_cumulative_immobilization_and_deaths(trajectories, country, interval): trajectory = trajectories[country] immobi = -trajectory[['mobility_reduction']]", "if last_index_is_green is None or 
last_index_is_green == True: if green_stretches: green_stretches[-1] += [index]", "ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept", "line (BEGIN) lwidths = [1*np.log(1 + np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1, 2)", "'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil':", "index in nongreen_indices: if last_index_is_green is None or last_index_is_green == True: if green_stretches:", "color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) elif kind == 'deaths': # ==--- days_back =", "0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif')", "= np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line for y", "0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1,", "sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not", "if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8) else: if", "f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) elif kind == 'deaths': if cc in low_mortality_locations:", "continue if loc not in ed_locations: print(f\"{loc} in figure {fig_name}: deaths will be", "- 0.03*show_tail, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\",", "df = trajs[country] df_sel = df[ ~df[col1].isnull() & ~df[col2].isnull() ] last_day = df_sel.iloc[-1].name", "loc, spine in ax.spines.items(): if loc in spines: if loc == 'left': spine.set_position(('outward',", "# variable thickness line (BEGIN) lwidths = [1*np.log(1 
+ np.array(z))] points = np.array([x,", "mpl.plot df.at[missing_day, Rt_col] = np.nan # cannot be pd.NA because used in mpl.plot", "ax in fig.axes: if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1],", "alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False) def make_subplot_(ax, trajs, locations, final_day, show_fronts,", "= not loc in STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan', 'Japan', 'South", "'deaths' df = df[above_min_cumul_indices] # DATA: nullify missing days to obtain visual discontinuities", "h = fig.axes[-1].get_position().bounds fig.axes[-1].set_position([l, b - 0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\"", "[] last_index_is_green = None for index, value in df.iterrows(): if index in green_indices:", "/ ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval", "(0.05, 0.50, 0.15), 'North Carolina': (0.10, 0.00, 0.95), 'New York': (0.60, 0.30, 0.00),", "color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2),", "= scipy.stats.pearsonr(values[:,0], values[:,1])[0] wrho = wstats.DescrStatsW(values, weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines,", "not this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values", "variable thickness line (BEGIN) lwidths = [0.7 * (0 + np.log(z))] points =", "available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available day", "0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90, 0.70,", "fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def 
plot_cumulative_immobilization_and_gdp_drop(trajectories, locations,", "0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f}\", xy=(0.15, 0.94),", "va=\"center\") ax_leg.annotate(text=r'when \\textbf{$>$ ' + str(thr_weekly_cases_per_1M) + r'} ' r'new cases /week /M',", "[float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) #", "deaths in low-mortality locations if kind == 'deaths' and country in low_mortality_locations: ax.annotate(s=country,", "days_of_last_available_data = set() for country in locations: if skipped and country in skipped:", "'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily': immobi = immobi.cumsum() deaths =", "range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if", "= df_sel.iloc[-1].name days_of_last_available_data.add(last_day) if verbose: print(country, last_day.strftime('%b%d')) day_of_last_available_data = min(days_of_last_available_data) if force_end is", "= [[front_coords[0][0] + 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] + 0.8 + 1.2*np.sin((180", "< 0, 0) lwidths = [z] for segi, seg in enumerate(segments): seg =", "+ np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1)", "loc = r'\\textit{' + loc + r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data',", "ax.plot(*front_coords, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8)", "range(1, len(im)): m, ms = [('s', 1.7), ('D', 1.55), ('p', 2.2)][i % 3]", "points_eur, weights_eur = [], [] for loc in locations: if population(loc) < MIN_POPULATION_M", "alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per case:', xy=(0.5, 0.84), xycoords='axes fraction', fontsize=8, 
ha=\"center\", va=\"center\")", "scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + slope*5000], linewidth=0.75, linestyle='--', color='#aaaaaa', zorder=5) weights =", "0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6), 'Belgium': plt.cm.tab10(5), 'Bulgaria': plt.cm.tab10(2), 'Croatia': (0.50,", "fig.axes[-1].set_position([l, b - 0.0185, w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\",", "color='#aaaaaa', zorder=5) weights = [] for _, row in df.iterrows(): location = row.name", "y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000',", "= correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ =", "extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de = scale_deaths(de) put_final_dot(ax, loc, im[-1], de[-1], show_population_halo=show_population_halo) if show_tail:", "for i in range(9): points, weights = [], [] for loc in locations:", "-gdp_2020h1[loc] immob, _ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig, ax", "fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') #", "'.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'),", "= extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points) rho,", "scale_deaths(im_de[1])]) weights.append(population(loc)) points = 
np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho)", "import locale import dill import gzip from shared import * register_matplotlib_converters() locate_set =", "points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights)", "in enumerate(segments): seg = seg.T el = min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3)", "zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests per", "country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations", "+ 0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def", "'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x],", "] elif last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green = True elif index", "in excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc) )", "'North Carolina': (0.10, 0.00, 0.95), 'New York': (0.60, 0.30, 0.00), 'Ohio': (0.65, 0.00,", "if loc not in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name} because of missing", "color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97), xycoords='axes fraction', color=ANNOT_COLOR)", "figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT:", "marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False, color=color_of(location), markerfacecolor=color_of(location)) loc", "% 3] 
ax.plot(im[-1 - i], de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33,", "clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77),", "= 0.1 + (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-',", "black for y in [y1, y2, y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx", "color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color)", "<-- not this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z =", "and country in skipped: continue df = trajs[country] df_sel = df[ ~df[col1].isnull() &", "' r'new cases /week /M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no", "label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <= last_avail_day", "color='#ffaaee', clip_on=False, zorder=10) # variable thickness line (BEGIN) lwidths = [0.7 * (0", "plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------", "'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65, 0.15, 0.00), 'Colombia': (0.00, 0.10, 0.65), 'Ecuador':", "zorder=100) total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes", "= df[f\"new_{kind}\"].ffill().rolling(window=7, min_periods=7, **ROLL_OPTS).sum()\\ / population(country) assert len(df_freq) == len(df) green_indices = df[df_freq", "+ 0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, 
clip_on=False) def put_legend_deaths(ax_leg): z =", "= df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]],", "( 40, 1.014), 'Michigan': (800, 1.018), 'Florida': ( 0, 0.987), 'Illinois': ( 90,", "import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt import matplotlib.dates as dts import", "'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax)) ticks", "= plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories,", "show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = []", "fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig if __name__", "f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients", "missing days to obtain visual discontinuities for missing_day in missing_days[country]: if df.index[0] <=", "ticks[1:]]) def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax,", "#pylint: disable = C0302 # too-many-lines \"\"\" This code features the article \"Pareto-based", "1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00),", "finals_remaining = [(*im_de, loc) for loc, im_de in finals.items() if loc not in", "\"Pareto-based evaluation of national responses to COVID-19 pandemic shows that saving lives 
and", "0, 0.999), 'Georgia': (825, 0.991) } if label_shifting == 'A' else {} if", "f\"{wrho:.2f}\", xy=(0.15, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn,", "linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction',", "show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2", "+ lwidths[0][segi] / 8 else: el = 0.10 + lwidths[0][segi] / 14 co", "front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0] - 0.8 +", "i) make_subplot_(axes[i], trajs, locations, fday, show_fronts=show_fronts and i>0, panel_letter=panel_letter) axes[i].annotate(r'\\large\\textbf{' + panel_letter +", "fn = f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations,", "[] for i in range(9): points, weights = [], [] for loc in", "if kind == 'cases': assert last_day in trajs[cc].index, \\ print(f\"Day {last_day} not available", "1)) # tracer line for y in [y1, y2, y3]: xx = [float(x[0])", "last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations, cols, force_end=fday) assert fday <= last_avail_day panel_letter =", "= excess_deaths.keys() points, weights = [], [] points_eur, weights_eur = [], [] for", "# -- Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks: OUT_OF_FRONT = ['Greece', 'Hungary',", "+ [float(x[-1]) - 0.125] ax_leg.plot(xx, y, linestyle='-', linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10)", "for y in [y1, y2, y3]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx +", 
"lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are optional) finals =", "sd[1] < cutoff) or (not optima and sd[1] > cutoff): front += [sd[2]]", "f\"Figure{fig_name}{panel_letter}.csv\" np.savetxt(csv_fn, values, header='lockdown,sqrt_deaths', delimiter=',') cols = ['mobility', 'new_deaths'] # set up the", "\"\"\" # -------------------------------------------------------------------------------------------------- import re from operator import itemgetter from multiprocessing import Pool", "plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig", "panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax)", "z)] for segi, seg in enumerate(segments): seg = seg.T if kind == 'cases':", "xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig", "fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn", "axis=1) de = df[['new_deaths14']] ca = df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back, freq", "of excess deaths\") if loc not in gdp_2020h1: print(f\"{loc} skipped in figure {fig_name}", "fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig", "clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel title", "/ ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 
'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax,", "linewidth=0.75, alpha=1, solid_capstyle='round', color='#ffaaee', clip_on=False, zorder=10) # variable thickness line (BEGIN) lwidths =", "tick labels in generated figures.') # -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] =", "+ z)] for segi, seg in enumerate(segments): seg = seg.T if kind ==", "color='#000000', alpha=0.33, zorder=40) # PLOT: panel title ax.annotate(text=country, xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9,", "locations if kind == 'deaths' and country in low_mortality_locations: ax.annotate(s=country, xy=(0.5, 0.88), xycoords='axes", "loc = location.replace('United Kingdom', 'UK') if italic: loc = r'\\textit{' + loc +", "'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction':", "'.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-',", "91, 10)] else: front_coords = [[front_coords[0][0] - 0.707*180 + 180*np.cos((180 + i)/360*2*3.14159), front_coords[1][0]", "(BEGIN) lwidths = [1*np.log(1 + np.array(z))] points = np.array([x, y2]).T.reshape(-1, 1, 2) segments", "== False: green_stretches += [ [index] ] elif last_index_is_green == True: green_stretches[-1] +=", "/ 8 else: el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(SNAIL_ORANGE,", "'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR =", "= -0.08 if v in [100, 300, 1000] else 0 ax_leg.annotate(text=f\"{v}\"[::-1].replace('000', 'k')[::-1], color='black',", "try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: 
try: locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL,", "[ [index] ] elif last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green = True", "assert last_day in trajs[cc].index, \\ print(f\"Day {last_day} not available for {cc} that ends", "x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments", "+ thin black for y in [y1, y2, y3]: xx, yy = x[:-1],", "PLOT: dots + thin black x, y = df[[mob_col, Rt_col]].values.T ax.scatter(x, y, s=0.025,", "loc) for loc, im_de in finals.items() if loc not in fronts_locations and loc", "final_day], show_fronts=True, show_tail=False, show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True,", "s=None) def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0, 1), reverse=not optima) # x-ascending", "2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half of 2020 /", "{fig_name} because of missing GDP data\") continue is_in_Europe = not loc in STATE_TO_ABBREV", "= deaths.cumsum().groupby(pd.Grouper(freq='M')).nth(0) / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) ii =", "= 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary", "DATA: nullify missing days to obtain visual discontinuities for missing_day in missing_days[country]: if", "clip_on=False, color=light_color, markerfacecolor=light_color) else: marker_size = 6 ax.plot([x], [y], '-.', marker='8' if is_extra_country", "'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col =", 
"xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, ha=\"center\", va=\"center\", clip_on=False, zorder=100, path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop", "y**0.9999 * (1 if location not in label_shifts else label_shifts[location][1])), color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False)", "for loc in finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes", "(0 if location not in label_shifts else label_shifts[location][0]), y**0.9999 * (1 if location", "= (len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci,", "optimal) fronts.append(front) for front_i, front in enumerate(fronts): color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0,", "0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':'", "this z = de.join(ca) z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z,", "), 'Ohio': ( 40, 1.014), 'Michigan': (800, 1.018), 'Florida': ( 0, 0.987), 'Illinois':", "1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) de = df[['new_deaths14']] ca = df[['new_cases14'", "= sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) #", "def plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals,", "xy=(0.5, 0.88), xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = trajs_orig[country].loc[last_day,", "plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05, 0.05), 'United Kingdom':", 
"fronts_locations and loc not in OUT_OF_FRONT] front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i,", "ax.spines.items(): if loc in spines: if loc == 'left': spine.set_position(('outward', left_shift)) elif loc", "# %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5,", "yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='black', alpha=0.5, clip_on=False,", "{fig_name}\") continue if loc not in ed_locations: print(f\"{loc} in figure {fig_name}: deaths will", "np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi, seg in", "0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida': (0.95, 0.40, 0.00), 'Georgia': (0.80, 0.10,", "'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False) x, y1, y2 = [],", "solid_capstyle='round', zorder=20) elif kind == 'deaths': # ==--- days_back = 14 x, y", "of missing GDP data\") continue is_in_Europe = not loc in STATE_TO_ABBREV and not", "weights = [], [] for loc in locations: im_de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').iloc[-1", "final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved", "in figure {fig_name}\") continue if loc not in ed_locations: print(f\"{loc} in figure {fig_name}:", "#!/usr/bin/env python3 #pylint: disable = C, R #pylint: disable = E1101 # no-member", "loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x,", "in green_indices: if last_index_is_green is None or last_index_is_green == False: green_stretches += [", "are 
non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific Reports, 2021). License: MIT Last", "as wstats import matplotlib.pyplot as plt import matplotlib.dates as dts import matplotlib.ticker as", "f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image as PDF fig.tight_layout() fn =", "plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor)", "color=sns.set_hls_values(color, l=0.3), fontsize=7, zorder=10) weights.append(population(location)) rho, wrho = correlations(df.values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97),", "= E1101 # no-member (generated-members) #pylint: disable = C0302 # too-many-lines \"\"\" This", "if show_population_halo: marker_size = 3.5 diameter = np.sqrt(population(location)) * 3 light_color = color_of(location)", "0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00),", "adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half of", "shown)\", xy=(0.5, 0.67), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left',", "on testing', xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in", "jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind == 'cases': assert", "= population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind)", "2 for country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() &", "trajs, locations, final_day, show_fronts, 
panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown')", "for ng in nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day) #", "Korea': ( 52, 0.59 ), 'Portugal': ( 0, 0.97 ), 'Bulgaria': (830, 0.994),", "in enumerate(segments): seg = seg.T if kind == 'cases': el = 0.15 +", "scipy.stats import statsmodels.stats.weightstats as wstats import matplotlib.pyplot as plt import matplotlib.dates as dts", "= sd[1] return front def put_final_dot(ax, location, x, y, is_extra_country=False, is_tail_shown=False, show_population_halo=False, label_shifting='A',", "\\ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation", "0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70,", "left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss", "color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False)", "finals, n_fronts=2, optimal=False) # annotation: last day ax.annotate(str('Date:' if show_corr_history else 'Last day:')", "%d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) # annotation: correlation coefficients values =", "in finals.keys()]) rho, wrho = correlations(values, weights) ax.annotate(r'Correlation:', xy=(0.0, 0.97), xycoords='axes fraction', color=ANNOT_COLOR)", "elif interval == 'weekly': immobi = immobi.resample('W').sum().cumsum() deaths = deaths.resample('W').sum().cumsum() / ppl df", "show_fronts, show_tail, show_corr_history, show_population_halo, fig_name='X', scale_deaths=np.sqrt): def draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts =", "ax.set_xlim((0, 5000)) 
ax.set_ylim((-2, 14)) slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000],", "locations, final_day, missing_days, excess_deaths, gdp_2020h1 = dill.load(f) print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01')", "def put_legend_deaths(ax_leg): z = [1, 3, 10, 30, 100, 300] x = np.array(list(range(len(z))))", "last day ax.annotate(str('Date:' if show_corr_history else 'Last day:') + \\ f\" {final_day.strftime('%B %d,", "immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5): ax.set_ylim((0, ymax))", "'Georgia': (0.80, 0.10, 0.60), 'Illinois': (0.75, 0.50, 0.00), 'Michigan': (0.05, 0.50, 0.15), 'North", "path_effects=[pthff.Stroke(linewidth=2, foreground=facecolor), pthff.Normal()]) pop = population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading", "xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} '", "np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export image as PDF fig.tight_layout() fn = f\"Figure{fig_name}.pdf\"", "'Mexico': (0.00, 0.50, 0.60), 'Peru': (0.75, 0.50, 0.25), } if country in colors.keys():", "marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals,", "# ==--- days_back = 14 x, y = df[[mob_col, Rt_col]].values.T points = np.array([x,", "if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur) rho, wrho =", "= np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country", "'Illinois': ( 90, 1.016), 'North 
Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0, 0.999),", "1000, 3000, 10000] x = np.array(list(range(len(z)))) y1 = np.ones(len(x))*0.62 y2 = np.ones(len(x))*0.31 y3", "ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st}", "[immob, gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated", "if not locate_set: print('Warning: US English locale could not be set. Check tick", "(0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10,", "f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M):", "6 nrows = (len(locations))//ncols + 1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows))", "0.00, 0.00), 'South Korea': (0.70, 0.60, 0.65), 'Taiwan': (0.10, 0.80, 0.00), 'California': (0.90,", "days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48), fancybox=False, fontsize=6.75)", "= [z] for segi, seg in enumerate(segments): seg = seg.T if kind ==", "xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only:", "linestyle='--', color='#aaaaaa', zorder=5) weights = [] for _, row in df.iterrows(): location =", "the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st}", "{fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1, fig_name, scale_deaths=np.sqrt): fig, ax", "verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d'), 
'==FORCED=>', force_end.strftime('%b%d')) day_of_last_available_data =", "return cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT: variable thickness line for stretch,", "ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=marker_size, markeredgewidth=0, alpha=0.8, clip_on=False,", "2.2)][i % 3] ax.plot(im[-1 - i], de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None,", "'#77ffaa', '#aabbdd', '#885500' ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors =", "in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100)", "final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1') fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2')", "1 fig, _ = plt.subplots(nrows=nrows, ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations):", "left_shift)) elif loc == 'bottom': spine.set_position(('outward', bottom_shift)) else: spine.set_color('none') if 'left' in spines:", "sd in sorted_data[1:]: if (optima and sd[1] < cutoff) or (not optima and", "nrows-1: ax.set_xlabel('Mobility', labelpad=-1) ax.set_xlim((-100, 0)) ax.set_xticks((-100, 0)) #ax.xaxis.set_major_formatter(tckr.PercentFormatter(decimals=0)) ax.set_xticklabels((r'$-100\\%$', r'$0\\%$')) # PLOT: Y-axis", "# cases even if kind == 'deaths' df = df[above_min_cumul_indices] # DATA: nullify", "0.1 + (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color,", "trajectory since 100 cumulative cases min_cumul = 100 above_min_cumul_indices = df['total_cases'] >= min_cumul", "y = df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1],", "'Taiwan': (0.10, 0.80, 0.00), 
'California': (0.90, 0.70, 0.00), 'Canada': (0.00, 0.45, 0.80), 'Florida':", "plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] = 1.67 plt.rcParams['ytick.major.pad'] = 1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text',", "0.9)) ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.48),", "COVID-19-related deaths in the 1\\textsuperscript{st} half of 2020 / M}}$') ax.set_xlim((-2, 14)) make_sqrt_deaths_yaxis(ax)", "alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, zorder=40) # PLOT: panel", "df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ] if cleanup else", "1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year", "0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.16 -", "np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country else", "np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [1*np.log(1 + z)]", "f\"Figure{fig_name}.pdf\" fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig if __name__ == '__main__': with", "< MIN_POPULATION_M or loc=='Serbia': print(f\"{loc} skipped in figure {fig_name}\") continue if loc not", "# annotation: last day ax.annotate(str('Date:' if show_corr_history else 'Last day:') + \\ f\"", "points = np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for segi,", "'-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots + thin black x,", "yy, linestyle='--', linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 
0.63), xycoords='axes", "1.33 plt.rc('font', size=8, family='sans-serif') plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions", "y2, y3]: xx = [float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125]", "# dots + thin black for y in [y1, y2, y3]: xx, yy", "(optima and sd[1] < cutoff) or (not optima and sd[1] > cutoff): front", "f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"population-weighted Pearson's $\\rho$ = \"", "8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are optional)", "show_corr_history=True, show_population_halo=True, fig_name='1') figS1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, final_day, show_fronts=False, show_tail=True, show_corr_history=False, show_population_halo=False, fig_name='S1')", "0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for y", "fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') #", "'Poland': (0.15, 0.65, 1.00), 'Portugal': (0.95, 0.65, 0.00), 'Romania': plt.cm.tab10(8), 'Russia': (0.80, 0.45,", "sorted_data[0][2] ] cutoff = sorted_data[0][1] for sd in sorted_data[1:]: if (optima and sd[1]", "(END) points = np.array([x, y2]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) for", "import re from operator import itemgetter from multiprocessing import Pool import pandas as", "is None: if verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else:", "f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, 
color='#666666', ha=\"center\",", "import matplotlib.pyplot as plt import matplotlib.dates as dts import matplotlib.ticker as tckr import", "= {} assert len(cols) == 2 for country in locations: df = trajs[country].loc[:day_of_last_available_data]", "0.09, 0.45, 0.2]) adjust_spines(ax, ['left', 'bottom'], left_shift=7) ax.annotate(r'\\large\\textbf{C}', xy=(-0.275, 1.06), xycoords='axes fraction', clip_on=False)", "+ f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\"", "country in locations: df = trajs[country].loc[:day_of_last_available_data] edited_trajs[country] = df[ ~df[col1].isnull() & ~df[col2].isnull() ]", "ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2,", "weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5,", "['mobility', 'new_deaths'] # set up the figure if show_corr_history: fig, axes = plt.subplots(ncols=2,", "if ax.is_last_row() and ax.is_last_col(): ax.set_axis_off() if kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind", "0.5, y[vi]+0.05+0.005*vi), xycoords='data', fontsize=5.75, ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1,", "1.014), 'Michigan': (800, 1.018), 'Florida': ( 0, 0.987), 'Illinois': ( 90, 1.016), 'North", "+ f\"{wrho:.2f}\", xy=(0.16 - 0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if", "new death /week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi,", "print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last shared", "14 co 
= sns.set_hls_values(SNAIL_ORANGE, l=el) ax.plot(seg[0], seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='round', zorder=20)", "if __name__ == '__main__': with gzip.open('processed_data.dill.gz', 'rb') as f: trajectories, locations, final_day, missing_days,", "np.place(z, np.isinf(z), 1000) np.place(z, z < 0, 0) lwidths = [z] for segi,", "= z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z), 1000)", "header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn = f\"Figure{fig_name}_eur.csv\" np.savetxt(csv_fn, values_eur, header='gdp_loss,sqrt_deaths', delimiter=',') # export image as", "trajs, locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')}", "0.25, 1.00), 'Texas': (0.35, 0.40, 0.40), 'Argentina': (0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65,", "in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the", "== 'deaths' df = df[above_min_cumul_indices] # DATA: nullify missing days to obtain visual", "assert len(df_freq) == len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >=", "enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) # PLOT: deaths in low-mortality locations if kind", "-trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval == 'monthly': immobi =", "{} for loc in locations: im, de = extract_cumulative_immobilization_and_deaths(trajs, loc, 'monthly').values.T de =", "+ np.log(z))] points = np.array([x, y1]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1)", "==--- days_back = 14 x, y = df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1,", "ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), 
xy=(0.5, 0.77), xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\",", "i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color, 0.9), markeredgecolor=darker_color, alpha=alpha) ax.plot(im[-1], de[-1], '.',", "if show_tail: color = color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im, de,", "in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of", "= np.array([finals[loc] for loc in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if", "points[1:]], axis=1) de = df[['new_deaths14']] ca = df[['new_cases14' ]] ca = ca.set_index( ca.index.shift(+days_back,", "%d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction', color=ANNOT_COLOR) fn = f\"Figure{fig_name}_{last_day.strftime('%b%d')}.pdf\" fig.savefig(fn) print(f\"Saved figure", "color = sns.set_hls_values('gray', l=0.1 + 0.04*(max(0, front_i - 1*optimal))) # TMP: was 0.15+0.1*", "[gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop + 0.028), xycoords='data',", "xx = [float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1]) - 0.125] ax_leg.plot(xx, y,", "plt.rc('text', usetex=True) plt.rc('text.latex', preamble=r'''\\usepackage{cmbright}''') # -- Plotting auxiliary functions ------------------------------------------------------------------ # manual tweaks:", "xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values,", "total = trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d} {:s} in total\".format(int(round(total)), kind), xy=(0.5, 0.77), xycoords='axes fraction',", "thr_weekly_cases_per_1M].index nongreen_indices = df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green =", "(tests_per_hit > 10000), 10000) z = 
0.7*np.log(0 + tests_per_hit) np.place(z, np.isnan(z), 0) np.place(z,", "gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) set_ticks_lengths(ax) ax.set_xlabel(r'Cumulated mobility", "panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09, 0.45,", "_, row in df.iterrows(): location = row.name color = color_of(location) mob_red, gdp_drop =", "np.array(points), np.array(points_eur) rho, wrho = correlations(values, weights) rho_eur, wrho_eur = correlations(values_eur, weights_eur) ax.annotate(r'Correlation:',", "show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values,", "3.5 diameter = np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x], [y], '-.', marker='8'", "lwidths = [1*np.log(1 + z)] for segi, seg in enumerate(segments): seg = seg.T", "kind == 'cases': # ==--- # PLOT: pink tracer line ax.plot(*df[[mob_col, Rt_col]].values.T, linestyle='-',", "front_i - 1*optimal))) # TMP: was 0.15+0.1* front_coords = np.array([finals[loc] for loc in", "in range(9): points, weights = [], [] for loc in locations: im_de =", "last_index_is_green == False: nongreen_stretches[-1] += [index] last_index_is_green = False stretches = [( g,", "z['cases14_per_death14'] = z['new_cases14'] / z['new_deaths14'] z = z['cases14_per_death14'].values np.place(z, np.isnan(z), 0) np.place(z, np.isinf(z),", "y in [y2]: xx, yy = x[:-1], y[:-1] ax_leg.scatter(xx + 0.5, yy, s=0.025,", "seg in enumerate(segments): seg = seg.T if kind == 'cases': el = 0.15", "kind='cases', thr_weekly_cases_per_1M=20): assert kind in ('cases', 'deaths') trajs_orig = trajs.copy() low_mortality_locations = ['Taiwan',", "= trajs.copy() low_mortality_locations = ['Taiwan', 'Slovakia', 'New Zealand'] mob_col, Rt_col = 
f\"mobility_historical_{kind}\", f\"Rt_{kind}\"", "':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else 0.8, clip_on=False)", "1), reverse=not optima) # x-ascending front = [ sorted_data[0][2] ] cutoff = sorted_data[0][1]", "df.index[0] <= missing_day and missing_day <= FINAL_DAY: df.at[missing_day,mob_col] = np.nan # cannot be", "l=0.1 + 0.04*(max(0, front_i - 1*optimal))) # TMP: was 0.15+0.1* front_coords = np.array([finals[loc]", "ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} ' r'new cases /week /M', xy=(0.5,", "'bottom'] if ax.is_first_col() else ['bottom']) set_ticks_lengths(ax) # PLOT: legend for ax in fig.axes:", "_ = extract_cumulative_immobilization_and_deaths(trajectories, loc, 'daily').loc[final_day] df.loc[loc] = [immob, gdp_drop] fig, ax = plt.subplots(figsize=(5,", "last_index_is_green == False: green_stretches += [ [index] ] elif last_index_is_green == True: green_stretches[-1]", "not loc in ['Canada', 'Taiwan', 'Japan', 'South Korea'] deaths = max(excess_deaths[loc] if loc", "verbose: print(f\"Last shared available day ({' & '.join(cols)}):\", day_of_last_available_data.strftime('%b%d')) else: if verbose: print(f\"Last", "df.index = [i.replace(day=1) for i in ii] return df elif interval == 'weekly':", "0.5, y[vi]+0.05 + 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs,", "= [i.replace(day=1) for i in ii] return df elif interval == 'weekly': immobi", "'Germany': (0.55, 0.25, 0.70), 'Hungary': (0.35, 0.35, 0.35), 'Greece': (0.45, 0.75, 1.00), 'Italy':", "/M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z):", "ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2, clip_on=False,", "------------------------------------------------------------------ # manual 
tweaks: OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia'] # colors:", "= 0.5 plt.rcParams['ytick.major.width'] = 0.5 plt.rcParams['xtick.minor.width'] = 0.5 plt.rcParams['ytick.minor.width'] = 0.5 plt.rcParams['xtick.major.pad'] =", "final_day, show_fronts, panel_letter=None): adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlim((0, 8e3)) ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative", "[y1, y2, y3]: xx = [float(x[0]) + 0.125] + list(x[1:-1]) + [float(x[-1]) -", "fig_name, scale_deaths=np.sqrt): fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'], left_shift=10) ax.set_xlabel(r'GDP loss", "z = [1, 3, 10, 30, 100, 300] x = np.array(list(range(len(z)))) y2 =", "markersize=7, markerfacecolor='#00000000', markeredgecolor='black', markeredgewidth=0.5, label=r'population-weighted $\\rho$') ax.plot(x, y1, '.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5,", "for segi, seg in enumerate(segments): seg = seg.T el = 0.1 + (lwidths[0][segi]", "/ population(country) assert len(df_freq) == len(df) green_indices = df[df_freq < thr_weekly_cases_per_1M].index nongreen_indices =", "0.30, 0.00), 'Ohio': (0.65, 0.00, 0.00), 'Pennsylvania': (0.20, 0.25, 1.00), 'Texas': (0.35, 0.40,", "1000) np.place(z, z < 0, 0) lwidths = [1*np.log(1 + z)] for segi,", "# -- Shared plot settings -------------------------------------------------------------------------- plt.rcParams['axes.linewidth'] = 0.5 plt.rcParams['xtick.major.width'] = 0.5 plt.rcParams['ytick.major.width']", "line (BEGIN) lwidths = [0.7 * (0 + np.log(z))] points = np.array([x, y1]).T.reshape(-1,", "variable thickness line (BEGIN) lwidths = [1*np.log(1 + np.array(z))] points = np.array([x, y2]).T.reshape(-1,", "color_of(location) ax.plot([x], [y], '-.', marker='8' if is_extra_country else 'o', linewidth=1, markersize=diameter, markeredgewidth=0, alpha=0.2,", "trajectory = trajectories[country] immobi = 
-trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if", "/M', xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5,", "last_index_is_green == True: if green_stretches: green_stretches[-1] += [index] # extra point for smooth", "fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name):", "10)] ax.plot(*np.array(front_coords).T, ':' if optimal else '--', c=color, alpha=0.8, linewidth=1.1 if optimal else", "ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot \"flares\" (tails are optional) finals = {}", "co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-', color=co, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False)", "ax.set_xlabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths", "- 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T,", ">= min_cumul # cases even if kind == 'deaths' df = df[above_min_cumul_indices] #", "Pool import pandas as pd import seaborn as sns import numpy as np", "sns.set_hls_values(SNAIL_ORANGE, l=el) ax_leg.plot(seg[0]-0.025, seg[1], '-', color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots", "ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper center', bbox_to_anchor=(0.5,", "License: MIT Last changes: November 09, 2020 \"\"\" # -------------------------------------------------------------------------------------------------- import re from", "['Greece', 'Hungary', 'Canada', 'Netherlands', 
'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd',", "figure {fig_name}: deaths will be used in place of excess deaths\") if loc", "df.iterrows(): location = row.name color = color_of(location) mob_red, gdp_drop = row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red],", "xy=(0.5, 0.055), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") for vi, v in enumerate(z): for", "False stretches = [( g, SNAIL_GREEN ) for g in green_stretches] \\ +", "-0)) ax_leg.set_ylim((0, 1)) # tracer line for y in [y1, y2, y3]: xx", "used in place of excess deaths\") if loc not in gdp_2020h1: print(f\"{loc} skipped", "linewidth=0.1, color='black', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(s=r'Cases per death:', xy=(0.5, 0.63), xycoords='axes fraction', fontsize=8,", "kind == 'cases': put_legend_cases(fig.axes[-1], thr_weekly_cases_per_1M) elif kind == 'deaths': put_legend_deaths(fig.axes[-1]) # PLOT: export", "0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f}\", xy=(0.15, 0.97),", "seg = seg.T el = 0.1 + (lwidths[0][segi] - 0.)/14 color = sns.set_hls_values(SNAIL_ORANGE,", "plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df = pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location')", "print('Locations count:', len(locations)) jul01 = pd.to_datetime('2020-07-01') fig1 = plot_cumulative_immobilization_and_deaths(trajectories, locations, [jul01, final_day], show_fronts=True,", "= r'\\textit{' + loc + r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x", "if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for i, fday in enumerate(final_day):", "color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}_all.csv\" np.savetxt(csv_fn, values, 
header='gdp_loss,sqrt_deaths', delimiter=',') csv_fn =", "in stretches: x, y = df.loc[stretch, [mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1,", ") put_final_dot(ax, loc, x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if", "color=sns.set_hls_values(color_of(location), l=0.3), clip_on=False) else: ax.annotate(loc, xycoords='data', xy=(x + 0.13, y + 0.04), color=sns.set_hls_values(color_of(location),", "'monthly').iloc[-1 - i] points.append([im_de[0], scale_deaths(im_de[1])]) weights.append(population(loc)) points = np.array(points) rho, wrho = correlations(points,", "if optimal else 0.8) else: if optimal: front_coords = [[front_coords[0][0] + 0.707*180 +", "label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months of 2020') ax.set_ylabel(r\"Pearson's $\\rho$\") ax.legend(loc='upper", "/ 8 else: el = 0.10 + lwidths[0][segi] / 14 co = sns.set_hls_values(color,", "x, y, show_population_halo=True, label_shifting=False, italic=not is_in_Europe) points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y])", "non-trade-off objectives\" by Kochanczyk & Lipniacki (Scientific Reports, 2021). 
License: MIT Last changes:", "= [], [] last_index_is_green = None for index, value in df.iterrows(): if index", "ax_leg.scatter(xx + 0.5, yy, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5,", "if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x + 65 - (0 if location", "color=color, linewidth=lwidths[0][segi], alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # dots + thin black for y", "weights_eur) ax.annotate(r'Correlation:', xy=(-0.01, 0.97), xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" +", "'.-', linestyle='dashed', linewidth=0.5, color='#333333', markersize=5.5, label=r'non-weighted $\\rho$') ax.set_ylim((0.5, 0.9)) ax.set_xlabel(r'First days of months", "[], [] points_eur, weights_eur = [], [] for loc in locations: if population(loc)", "1e6*is_USA_state(cc) else: return trajs[cc].loc[last_day, f\"total_{kind}\"] / population(cc) + 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita,", "col2 = cols days_of_last_available_data = set() for country in locations: if skipped and", "xycoords='axes fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col()", "locale.setlocale(locale.LC_TIME, 'en_US.utf8') locale.setlocale(locale.LC_ALL, 'en_US.utf8') locate_set = True except: locale.setlocale(locale.LC_TIME, 'POSIX') locale.setlocale(locale.LC_ALL, 'POSIX') if", "locate_set = False try: locale.setlocale(locale.LC_TIME, 'en_US') locale.setlocale(locale.LC_ALL, 'en_US') locate_set = True except: try:", "figure file {fn}.\") return fig def plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, final_day, gdp_2020h1, fig_name): df =", "nongreen_stretches] def by_first_day(cs): return cs[0][0] stretches = sorted(stretches, key=by_first_day) # PLOT: variable thickness", "for label in (ax.get_xticklabels() + 
ax.get_yticklabels()): label.set_fontsize(6.25) else: last_avail_day, trajs = jointly_trimmed_trajs(trajectories, locations,", "r'}' if label_shifting == 'A': ax.annotate(loc, xycoords='data', xy=(x + 65 - (0 if", "missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1, fig_name='4') fig5 = plot_gdp_drop_and_excess_deaths(trajectories, locations,", "alpha=1, solid_capstyle='butt', zorder=20, clip_on=False) # variable thickness line (END) points = np.array([x, y2]).T.reshape(-1,", "df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df elif interval == 'daily':", "xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) +", "w, h]) fig.axes[-1].annotate('Last day:' + f\" {final_day.strftime('%B %d, %Y')}\", xy=(0.0, 1.01), xycoords='axes fraction',", "x, y = df[[mob_col, Rt_col]].values.T points = np.array([x, y]).T.reshape(-1, 1, 2) segments =", "3] ax.plot(im[-1 - i], de[-1 - i], '.', marker=m, markersize=ms, fillstyle=None, markeredgewidth=0.33, markerfacecolor=darken(color,", "x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax, loc, x, y, show_population_halo=True,", "va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue #", "np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1))", "weights.append(population(loc)) points = np.array(points) rho, wrho = correlations(points, weights) x.append(im_de.name) y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b'))", "n_fronts=2, 
optimal=False) # annotation: last day ax.annotate(str('Date:' if show_corr_history else 'Last day:') +", "+ 0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0, 91, 10)] else:", "y1.append(rho) y2.append(wrho) ax.xaxis.set_major_formatter(dts.DateFormatter('%b')) # %d ax.yaxis.set_major_locator(tckr.MultipleLocator(0.1)) ax.plot(x, y2, '.-', linestyle='dotted', linewidth=0.5, color='#333333', markersize=7,", "ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155, 0.97), xycoords='axes fraction',", "= jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if kind == 'cases':", "assert final_day <= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6, 5)) make_subplot_(axes, trajs, locations,", "fig2 = plot_R_vs_mobility_reduction(trajectories, locations, jul01, missing_days, fig_name='2') fig4 = plot_cumulative_immobilization_and_gdp_drop(trajectories, locations, jul01, gdp_2020h1,", "points.append([x, y]) weights.append(population(loc)) if is_in_Europe: points_eur.append([x, y]) weights_eur.append(population(loc)) values, values_eur = np.array(points), np.array(points_eur)", "front = pareto_front(finals_remaining, optimal) fronts.append(front) for front_i, front in enumerate(fronts): color = sns.set_hls_values('gray',", "y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch, 'tests_per_hit'].values np.place(tests_per_hit,", "(year-on-year \\%)') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf COVID-19-related deaths in the 1\\textsuperscript{st} half of 2020 / M}}$')", "in mpl.plot df.sort_index(inplace=True) if kind == 'cases': # ==--- # PLOT: pink tracer", "for __ in _] finals_remaining = [(*im_de, loc) for loc, im_de in finals.items()", "xycoords='axes fraction', fontsize=9, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) total = 
trajs_orig[country].loc[last_day, f\"total_{kind}\"] ax.annotate(s=\"{:d}", "jointly_trimmed_trajs(trajectories, locations, cols, force_end=final_day) assert final_day <= last_avail_day fig, axes = plt.subplots(ncols=1, figsize=(6,", "clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33, clip_on=False, zorder=40) ax_leg.annotate(text=r'Tests", "loc in front]).T if len(front_coords.T) > 1: ax.plot(*front_coords, ':' if optimal else '--',", "in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14)) slope,", "+ 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(180+0, 180+91, 10)] ax.plot(*np.array(front_coords).T, ':' if", "population(country) total_per_1M = trajs_orig[country].loc[last_day, f\"total_{kind}\"] / pop heading = \"{:d} {:s}/M\".format(int(round(total_per_1M)), kind) ax.annotate(text=heading,", "mob_col, Rt_col = f\"mobility_historical_{kind}\", f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day,", "scale) return sns.set_hls_values(color=color, h=None, l=lightness, s=None) def pareto_front(data, optima=True): sorted_data = sorted(data, key=itemgetter(0,", "# PLOT: export and return fig.tight_layout(w_pad=0.4, h_pad=0.15) l, b, w, h = fig.axes[-1].get_position().bounds", "not be set. 
Check tick labels in generated figures.') # -- Shared plot", "'Russia': (0.80, 0.45, 0.15), 'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden':", "\" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates", "zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else ['bottom']) ax.set_xticks(()) continue # PLOT: X-axis", "'Slovakia': (0.25, 0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland':", "alpha=1, solid_capstyle='round', zorder=20) # PLOT: dots + thin black x, y = df[[mob_col,", "df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df def make_sqrt_deaths_yaxis(ax, ymax=40, sep=5):", "trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval ==", "(0.30, 0.75, 1.00), 'Bolivia': (0.20, 0.65, 0.00), 'Brazil': (0.00, 0.70, 0.20), 'Chile': (0.65,", "rgb_to_hls from pandas.plotting import register_matplotlib_converters import locale import dill import gzip from shared", "- (0 if location not in label_shifts else label_shifts[location][0]), y**0.9999 * (1 if", "0.03*show_tail, 0.94), xycoords='axes fraction', color=ANNOT_COLOR) # export coordinates if panel_letter is not None:", "'Hungary', 'Canada', 'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE = '#77ffaa', '#aabbdd', '#885500'", "Kingdom', 'UK') if italic: loc = r'\\textit{' + loc + r'}' if label_shifting", "make_subplot_(axes, trajs, locations, final_day, show_fronts=False, panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" #", "ha=\"center\", va=\"center\", zorder=30, clip_on=False) def put_legend_deaths(ax_leg): z = [1, 3, 10, 30, 100,", "= 
deaths.resample('W').sum().cumsum() / ppl df = immobi.join(deaths).rename(columns={ 'mobility_reduction': f\"immobilization_cumul_{country}\", 'new_deaths': f\"new_deaths_cumul_per_1M_{country}\"}) return df", "ncols=ncols, figsize=(8/5*ncols, 8/6*nrows)) for ci, country in enumerate(locations): ax = fig.axes[ci] ax.set_facecolor(facecolor) #", "thr_weekly_cases_per_1M): z = [3, 10, 30, 100, 300, 1000, 3000, 10000] x =", "100, 300] x = np.array(list(range(len(z)))) y2 = np.ones(len(x))*0.37 ax_leg.set_xlim((0-0.1, len(z)-1+0.1)) ax_leg.set_ylim((0, 1)) #", "pd.DataFrame(columns='location cumul_2020H1_mobility_reduction gdp_2020H1_drop'.split()) df = df.set_index('location') for loc in locations: if not loc", "if italic: loc = r'\\textit{' + loc + r'}' if label_shifting == 'A':", "= trajectories[country] immobi = -trajectory[['mobility_reduction']] deaths = trajectory[['new_deaths']].astype('Float64') ppl = population(country) if interval", "+ 1e6*is_USA_state(cc) locations = sorted(locations, key=by_per_capita, reverse=True) facecolor = '#f8f6f4' ncols = 6", "alpha = 0.7 ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for i in range(1,", "y3 = np.ones(len(x))*0.0 ax_leg.set_xlim((0 +0, len(z)-1 -0)) ax_leg.set_ylim((0, 1)) # tracer line for", "R #pylint: disable = E1101 # no-member (generated-members) #pylint: disable = C0302 #", "least 1} new death /week /M', xy=(0.5, 0.22), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\")", "= np.array([x, y]).T.reshape(-1, 1, 2) segments = np.concatenate([points[:-1], points[1:]], axis=1) tests_per_hit = df.loc[stretch,", "diameter = np.sqrt(population(location)) * 3 light_color = color_of(location) ax.plot([x], [y], '-.', marker='8' if", "/M', xy=(0.5, 0.62-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M)", "= df[above_min_cumul_indices] # DATA: nullify missing days to obtain visual 
discontinuities for missing_day", "xycoords='axes fraction', color=ANNOT_COLOR) ax.annotate(r\"(non-weighted) Pearson's $\\rho$ = \" + f\"{rho:.2f} (Europe-only: {rho_eur:.2f})\", xy=(0.155,", "# _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def put_legend_cases(ax_leg, thr_weekly_cases_per_1M): z", "fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day, missing_days, fig_name, kind='cases',", "ax.plot(im[-1], de[-1], '.', marker='o', markersize=1., markeredgewidth=0, markerfacecolor=darker_color, alpha=alpha) finals[loc] = (im[-1], de[-1]) if", "in spines: if loc == 'left': spine.set_position(('outward', left_shift)) elif loc == 'bottom': spine.set_position(('outward',", "+ panel_letter + r'}', xy=(-0.175, 1.04), xycoords='axes fraction', clip_on=False) ax = axes[1].inset_axes([0.92, 0.09,", "spine in ax.spines.items(): if loc in spines: if loc == 'left': spine.set_position(('outward', left_shift))", "up the figure if show_corr_history: fig, axes = plt.subplots(ncols=2, figsize=(11, 5)) for i,", "row[['cumul_2020H1_mobility_reduction', 'gdp_2020H1_drop']] ax.scatter([mob_red], [gdp_drop], color=color, zorder=10) ax.annotate(text=location.replace('United Kingdom', 'UK'), xy=(mob_red + 49, gdp_drop", "draw_pareto_fronts_(ax, finals, n_fronts, optimal): fronts = [] for i in range(n_fronts): fronts_locations =", "ANNOT_COLOR = '#777777' def color_of(country, dull_color=(0.15, 0.15, 0.15)): colors = { 'Austria': plt.cm.tab10(6),", "itemgetter from multiprocessing import Pool import pandas as pd import seaborn as sns", "min(1, 0.075 + ((lwidths[0][segi] - 0.)/7)**1.3) co = sns.set_hls_values(SNAIL_GREEN, l=el) ax_leg.plot(seg[0]+0.05, seg[1], '-',", "color=ANNOT_COLOR) # export coordinates csv_fn = f\"Figure{fig_name}.csv\" np.savetxt(csv_fn, df.values, header='lockdown,gdp_loss', delimiter=',') # export", "reverse=True) facecolor = 
'#f8f6f4' ncols = 6 nrows = (len(locations))//ncols + 1 fig,", "mpl.plot df.sort_index(inplace=True) if kind == 'cases': # ==--- # PLOT: pink tracer line", "xy=(0.5, 0.31-0.09), xycoords='axes fraction', fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'no data on testing', xy=(0.5, 0.055),", "xycoords='data', xy=(x + 65 - (0 if location not in label_shifts else label_shifts[location][0]),", "(0.50, 0.55, 0.00), 'Czechia': plt.cm.tab10(4), 'Denmark': (0.85, 0.20, 0.00), 'Finland': plt.cm.tab10(9), 'France': (0.95,", "df[df_freq >= thr_weekly_cases_per_1M].index green_stretches, nongreen_stretches = [], [] last_index_is_green = None for index,", "< cutoff) or (not optima and sd[1] > cutoff): front += [sd[2]] cutoff", "0.25), } if country in colors.keys(): return colors[country] else: return dull_color def correlations(values,", "locations, cols, force_end=None, skipped=None, cleanup=True, verbose=False): assert len(cols) == 2 col1, col2 =", "0.8 + 1.2*np.sin((180 + i)/360*2*3.14159)] for i in range(0, 91, 10)] else: front_coords", "ax.scatter(x, y, s=0.025, marker='o', facecolor='#000000', alpha=0.5, clip_on=True, zorder=30) ax.plot(x, y, linestyle='--', linewidth=0.1, color='#000000',", "OUT_OF_FRONT = ['Greece', 'Hungary', 'Canada', 'Netherlands', 'Czechia'] # colors: SNAIL_GREEN, SNAIL_NONGREEN, SNAIL_ORANGE =", "show_tail: color = color_of(loc) darker_color = darken(color_of(loc)) alpha = 0.7 ax.plot(im, de, '-',", "not loc in gdp_2020h1: print(f\"{loc}: missing GDP data in figure {fig_name}\") continue gdp_drop", "show_fronts=False, panel_letter='_') # export fig.tight_layout() fn = f\"Figure{fig_name}.pdf\" # _{last_day.strftime('%b%d')} fig.savefig(fn) print(f\"Saved figure", "weights=weights).corrcoef[0][1] return (rho, wrho) def adjust_spines(ax, spines, left_shift=15, bottom_shift=0): for loc, spine in", "Pearson's $\\rho$ = \" + f\"{wrho:.2f} (Europe-only: {wrho_eur:.2f})\", xy=(0.155, 0.94), xycoords='axes fraction', color=ANNOT_COLOR)", 
"list(range(0, ymax + sep, sep)) ax.set_yticks(ticks) ax.set_yticklabels(['0'] + [r'$\\sqrt{' + str(t**2) + '}$'", "last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green = True elif index in nongreen_indices:", "'-', linewidth=0.8, alpha=alpha, color=color) for i in range(1, len(im)): m, ms = [('s',", "0.90, 0.50), 'Slovenia': plt.cm.tab10(1), 'Spain': plt.cm.tab10(3), 'Sweden': (0.10, 0.20, 0.90), 'Switzerland': (1.00, 0.05,", "f\"Rt_{kind}\" last_day, trajs_trimmed = jointly_trimmed_trajs(trajs, locations, [mob_col, Rt_col], force_end=final_day, skipped=low_mortality_locations) def by_per_capita(cc): if", "2020') ax.set_ylabel(r'GDP loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000))", "delimiter=',') cols = ['mobility', 'new_deaths'] # set up the figure if show_corr_history: fig,", "fig.savefig(fn) print(f\"Saved figure file {fn}.\") return fig def plot_gdp_drop_and_excess_deaths(trajectories, locations, final_day, excess_deaths, gdp_2020h1,", "+ 0.005*vi), xycoords='data', fontsize=6, ha=\"center\", va=\"center\", zorder=30, clip_on=False, color='black') def plot_R_vs_mobility_reduction(trajs, locations, final_day,", "# dots + thin black for y in [y2]: xx, yy = x[:-1],", "for index, value in df.iterrows(): if index in green_indices: if last_index_is_green is None", "alpha=alpha) finals[loc] = (im[-1], de[-1]) if show_fronts: draw_pareto_fronts_(ax, finals, n_fronts=3+2, optimal=True) draw_pareto_fronts_(ax, finals,", "fontsize=6.5, ha=\"center\", va=\"center\") ax_leg.annotate(text=r'when \\textbf{$<$ ' + str(thr_weekly_cases_per_1M) + '} ' r'new cases", "plt import matplotlib.dates as dts import matplotlib.ticker as tckr import matplotlib.patheffects as pthff", "reverse=not optima) # x-ascending front = [ sorted_data[0][2] ] cutoff = sorted_data[0][1] for", "ca.index.shift(+days_back, freq ='D') ) # <-- THIS #de = de.set_index( de.index.shift(-days_back, freq ='D')", "'daily').loc[final_day] 
df.loc[loc] = [immob, gdp_drop] fig, ax = plt.subplots(figsize=(5, 5)) adjust_spines(ax, ['left', 'bottom'],", "excess_deaths else 0, trajectories[loc].loc[final_day]['total_deaths']) x, y = -gdp_2020h1[loc], np.sqrt(deaths / population(loc) ) put_final_dot(ax,", "ax.plot(im, de, '-', linewidth=0.8, alpha=alpha, color=color) for i in range(1, len(im)): m, ms", "cutoff) or (not optima and sd[1] > cutoff): front += [sd[2]] cutoff =", "for country in locations: if skipped and country in skipped: continue df =", "loss in the 1\\textsuperscript{st} half of 2020 (year-on-year \\%)') ax.set_xlim((0, 5000)) ax.set_ylim((-2, 14))", "< 0, 0) lwidths = [1*np.log(1 + z)] for segi, seg in enumerate(segments):", "in gdp_2020h1: print(f\"{loc}: missing GDP data in figure {fig_name}\") continue gdp_drop = -gdp_2020h1[loc]", "fraction', fontsize=6.5, color='#666666', ha=\"center\", va=\"center\", clip_on=False, zorder=100) adjust_spines(ax, ['left', 'bottom'] if ax.is_first_col() else", "'North Carolina': (-10, 0.97 ), 'Pennsylvania': ( 0, 0.999), 'Georgia': (825, 0.991) }", "green_stretches += [ [index] ] elif last_index_is_green == True: green_stretches[-1] += [index] last_index_is_green", "in range(1, len(im)): m, ms = [('s', 1.7), ('D', 1.55), ('p', 2.2)][i %", "slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(*df.values.T) ax.plot([0, 5000], [intercept, intercept + slope*5000],", "1.018), 'Florida': ( 0, 0.987), 'Illinois': ( 90, 1.016), 'North Carolina': (-10, 0.97", "marker='o', facecolor='#000000', alpha=0.5, clip_on=False, zorder=30) ax_leg.plot(xx + 0.5, yy, linestyle='--', linewidth=0.1, color='#000000', alpha=0.33,", "# PLOT: Y-axis if ax.is_first_col(): ax.set_ylabel(r'$R$') ax.set_ylim((0, 4)) ax.yaxis.set_major_locator(tckr.MultipleLocator(1)) ax.axhline(1, linestyle='--', linewidth=0.5, color='#666666')", "ax.set_xlabel(r'Cumulative lockdown') ax.set_ylabel(r'$\\sqrt{\\textrm{\\sf Cumulative deaths/M}}$') make_sqrt_deaths_yaxis(ax) # plot 
\"flares\" (tails are optional) finals", "ax.set_xlabel(r'Cumulated mobility reduction in the 1\\textsuperscript{st} half of 2020') ax.set_ylabel(r'GDP loss in the", "not loc in STATE_TO_ABBREV and not loc in ['Canada', 'Taiwan', 'Japan', 'South Korea']" ]
[ "from utilities.models import BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name", "from django.utils.translation import ugettext_lazy as _ from utilities.models import BaseDateTime class Contact(BaseDateTime): title", "from django.db import models from django.utils.translation import ugettext_lazy as _ from utilities.models import", "BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'),", "_('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'),", "models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta:", "def __unicode__(self): return self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by", "= _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',) db_table =", "import BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField(", "= models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100 ) email =", "Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100 )", "max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255", "django.db import models from django.utils.translation import ugettext_lazy as _ from utilities.models import BaseDateTime", "models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL')", "body = 
models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural", "__unicode__(self): return self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by =", "_('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def", "= models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class", "name = models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body", "max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self):", "return self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created'", ") name = models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255 )", "django.utils.translation import ugettext_lazy as _ from utilities.models import BaseDateTime class Contact(BaseDateTime): title =", "= _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',) db_table = 'contact_form_contacts' app_label =", "= models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural =", "email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name", "max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta: verbose_name =", "self.name class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering", "models from 
django.utils.translation import ugettext_lazy as _ from utilities.models import BaseDateTime class Contact(BaseDateTime):", "verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',) db_table", "class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100", "title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100 ) email", "Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',)", "= models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body =", "_('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',) db_table = 'contact_form_contacts' app_label = 'contact_form'", "_ from utilities.models import BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 )", "class Meta: verbose_name = _('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering =", "models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL'))", "utilities.models import BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255 ) name =", "models.CharField( _('TITLE_LABEL'), max_length=255 ) name = models.CharField( _('NAME_LABEL'), max_length=100 ) email = models.EmailField(", ") body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta: verbose_name = _('CONTACTS_TITLE')", "import ugettext_lazy as _ from utilities.models import BaseDateTime class Contact(BaseDateTime): title = models.CharField(", ") email = models.EmailField( _('EMAIL_LABEL'), max_length=255 ) body = 
models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return", "_('CONTACTS_TITLE') verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',) db_table = 'contact_form_contacts'", "_('EMAIL_LABEL'), max_length=255 ) body = models.TextField(_('MESSAGE_LABEL')) def __unicode__(self): return self.name class Meta: verbose_name", "verbose_name_plural = _('CONTACTS_TITLE_PLURAL') get_latest_by = 'created' ordering = ('-id',) db_table = 'contact_form_contacts' app_label", "as _ from utilities.models import BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'), max_length=255", "ugettext_lazy as _ from utilities.models import BaseDateTime class Contact(BaseDateTime): title = models.CharField( _('TITLE_LABEL'),", "import models from django.utils.translation import ugettext_lazy as _ from utilities.models import BaseDateTime class" ]
[ "verbose: `True` to increase verbosity \"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name", "coherence. It contains user-generated texts from Reddit and Wikipedia. Annotation labels are: *", "download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class for loading the DDisco dataset. The", "os import pandas as pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class", "for discourse coherence. It contains user-generated texts from Reddit and Wikipedia. Annotation labels", "def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir", "the directory for storing cached models :param bool verbose: `True` to increase verbosity", "_unzip_process_func, DATASETS class DDisco: \"\"\" Class for loading the DDisco dataset. The DDisco", "download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco dataset in dataframes with", "bool verbose: `True` to increase verbosity \"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR):", "self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name +", "* 3: high coherence :param str cache_dir: the directory for storing cached models", "high coherence :param str cache_dir: the directory for storing cached models :param bool", "self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self):", "from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class for loading", "self.dataset_dir = 
download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco dataset in", "cached models :param bool verbose: `True` to increase verbosity \"\"\" def __init__(self, cache_dir:", ":param bool verbose: `True` to increase verbosity \"\"\" def __init__(self, cache_dir: str =", "dataframes with pandas. :return: 2 dataframes -- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir,", "load_with_pandas(self): \"\"\" Loads the DDisco dataset in dataframes with pandas. :return: 2 dataframes", "= pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir,", "annotated for discourse coherence. It contains user-generated texts from Reddit and Wikipedia. Annotation", "pandas. :return: 2 dataframes -- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name +", "The DDisco dataset is annotated for discourse coherence. It contains user-generated texts from", "dataframes -- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension),", "self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension), sep='\\t',", "directory for storing cached models :param bool verbose: `True` to increase verbosity \"\"\"", "increase verbosity \"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension", "import os import pandas as pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS", "to increase verbosity \"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco'", "dataset. 
The DDisco dataset is annotated for discourse coherence. It contains user-generated texts", ":return: 2 dataframes -- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train'", "cache_dir: the directory for storing cached models :param bool verbose: `True` to increase", "import pandas as pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco:", "pandas as pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\"", "self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the", "'.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' +", "= DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco", "__init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir =", "the DDisco dataset in dataframes with pandas. 
:return: 2 dataframes -- train, test", "DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def", ":param str cache_dir: the directory for storing cached models :param bool verbose: `True`", "verbosity \"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension =", "+ '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test'", "2 dataframes -- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' +", "dataset in dataframes with pandas. :return: 2 dataframes -- train, test \"\"\" df_train", "sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension), sep='\\t', index_col=0,", "is annotated for discourse coherence. It contains user-generated texts from Reddit and Wikipedia.", "DDisco dataset in dataframes with pandas. :return: 2 dataframes -- train, test \"\"\"", "medium coherence * 3: high coherence :param str cache_dir: the directory for storing", "contains user-generated texts from Reddit and Wikipedia. Annotation labels are: * 1: low", "loading the DDisco dataset. The DDisco dataset is annotated for discourse coherence. It", "'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads", "Loads the DDisco dataset in dataframes with pandas. :return: 2 dataframes -- train,", "class DDisco: \"\"\" Class for loading the DDisco dataset. 
The DDisco dataset is", "cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name,", "DDisco: \"\"\" Class for loading the DDisco dataset. The DDisco dataset is annotated", "Reddit and Wikipedia. Annotation labels are: * 1: low coherence * 2: medium", "It contains user-generated texts from Reddit and Wikipedia. Annotation labels are: * 1:", "cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco dataset in dataframes with pandas. :return:", "df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test =", "DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco dataset", "* 2: medium coherence * 3: high coherence :param str cache_dir: the directory", "* 1: low coherence * 2: medium coherence * 3: high coherence :param", "\"\"\" Class for loading the DDisco dataset. The DDisco dataset is annotated for", "discourse coherence. It contains user-generated texts from Reddit and Wikipedia. 
Annotation labels are:", "\"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension']", "index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna()", "models :param bool verbose: `True` to increase verbosity \"\"\" def __init__(self, cache_dir: str", "df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() return df_train,", "\"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test", "dataset is annotated for discourse coherence. It contains user-generated texts from Reddit and", "encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() return", "are: * 1: low coherence * 2: medium coherence * 3: high coherence", "`True` to increase verbosity \"\"\" def __init__(self, cache_dir: str = DEFAULT_CACHE_DIR): self.dataset_name =", "in dataframes with pandas. :return: 2 dataframes -- train, test \"\"\" df_train =", "import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class for loading the DDisco", "user-generated texts from Reddit and Wikipedia. 
Annotation labels are: * 1: low coherence", "= DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir)", "3: high coherence :param str cache_dir: the directory for storing cached models :param", "+ self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension),", "coherence * 2: medium coherence * 3: high coherence :param str cache_dir: the", "texts from Reddit and Wikipedia. Annotation labels are: * 1: low coherence *", "the DDisco dataset. The DDisco dataset is annotated for discourse coherence. It contains", "DDisco dataset is annotated for discourse coherence. It contains user-generated texts from Reddit", "Wikipedia. Annotation labels are: * 1: low coherence * 2: medium coherence *", "as pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class", "DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class for loading the DDisco dataset.", "process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco dataset in dataframes with pandas.", "\"\"\" Loads the DDisco dataset in dataframes with pandas. :return: 2 dataframes --", "2: medium coherence * 3: high coherence :param str cache_dir: the directory for", "train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0,", "DDisco dataset. The DDisco dataset is annotated for discourse coherence. 
It contains user-generated", "= 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\"", "= pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.test' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() return df_train, df_test", "pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna() df_test = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name", "1: low coherence * 2: medium coherence * 3: high coherence :param str", "from Reddit and Wikipedia. Annotation labels are: * 1: low coherence * 2:", "coherence * 3: high coherence :param str cache_dir: the directory for storing cached", "-- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t',", "DATASETS class DDisco: \"\"\" Class for loading the DDisco dataset. The DDisco dataset", "storing cached models :param bool verbose: `True` to increase verbosity \"\"\" def __init__(self,", "for storing cached models :param bool verbose: `True` to increase verbosity \"\"\" def", "labels are: * 1: low coherence * 2: medium coherence * 3: high", "low coherence * 2: medium coherence * 3: high coherence :param str cache_dir:", "coherence :param str cache_dir: the directory for storing cached models :param bool verbose:", "= download_dataset(self.dataset_name, process_func=_unzip_process_func, cache_dir=cache_dir) def load_with_pandas(self): \"\"\" Loads the DDisco dataset in dataframes", "def load_with_pandas(self): \"\"\" Loads the DDisco dataset in dataframes with pandas. :return: 2", "for loading the DDisco dataset. 
The DDisco dataset is annotated for discourse coherence.", "<gh_stars>1-10 import os import pandas as pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func,", "danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class for loading the", "str = DEFAULT_CACHE_DIR): self.dataset_name = 'ddisco' self.file_extension = DATASETS[self.dataset_name]['file_extension'] self.dataset_dir = download_dataset(self.dataset_name, process_func=_unzip_process_func,", "and Wikipedia. Annotation labels are: * 1: low coherence * 2: medium coherence", "test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name + '.train' + self.file_extension), sep='\\t', index_col=0, encoding='utf-8').dropna()", "Annotation labels are: * 1: low coherence * 2: medium coherence * 3:", "Class for loading the DDisco dataset. The DDisco dataset is annotated for discourse", "pd from danlp.download import DEFAULT_CACHE_DIR, download_dataset, _unzip_process_func, DATASETS class DDisco: \"\"\" Class for", "str cache_dir: the directory for storing cached models :param bool verbose: `True` to", "with pandas. :return: 2 dataframes -- train, test \"\"\" df_train = pd.read_csv(os.path.join(self.dataset_dir, self.dataset_name" ]
[ "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N'): print(\"c1 {} f1 {}", "StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1", "{} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in TREASURE_MAP: print(t_r) me", "f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1", "print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me)", "From OceanOfCode # Class from OceanOfCode import StalkAndLegal from OceanOfCode import StalkAndTorpedo from", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('NA'): print(\"c1 {} f1 {} d1", "# Class from OceanOfCode import StalkAndLegal from OceanOfCode import StalkAndTorpedo from OceanOfCode import", "for c1, f1, d1 in update_order('MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "# Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS # Method from", "for i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp))", "c1, f1, d1 in update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self):", "print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in TREASURE_MAP: print(t_r)", "from OceanOfCode import DIRS # Method from OceanOfCode import manhattan from OceanOfCode import", "in update_order('SURFACE 1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me)", "print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence =", "= 
StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None)", "read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me)", "print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in", "print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N'): print(\"c1 {} f1 {} d1", "in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in", "update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None:", "= StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N'): print(\"c1 {}", "= StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me =", "update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not", "print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self):", "def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in range(0,5):", "f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me =", "d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) if", "= 
StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me =", "me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO 0 0|MOVE E'):", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me", "in update_order('MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me)", "StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me", "me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me =", "TREASURE_MAP # From OceanOfCode # Class from OceanOfCode import StalkAndLegal from OceanOfCode import", "= StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E'])", "EMPTY_SYMBOLS from OceanOfCode import DIRS # Method from OceanOfCode import manhattan from OceanOfCode", "TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE", "me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for", "if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) 
print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4))", "StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i", "{}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1,", "f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1", "me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1", "variables from test2.test_main import TREASURE_MAP # From OceanOfCode # Class from OceanOfCode import", "me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me", "f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me)", "StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'):", "1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def", "_small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N'):", "f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in TREASURE_MAP: print(t_r) me =", "from OceanOfCode import Submarine from OceanOfCode import Board # Global from OceanOfCode import", "= StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo =", "in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not", "OceanOfCode import StalkAndLegal from 
OceanOfCode import StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode", "N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1)", "{} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None)", "from OceanOfCode import update_order import unittest class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None)", "import Submarine from OceanOfCode import Board # Global from OceanOfCode import EMPTY_SYMBOLS from", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp))", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me", "print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {}", "= StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk =", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1", "Class from OceanOfCode import StalkAndLegal from OceanOfCode import StalkAndTorpedo from OceanOfCode import Submarine", "import EMPTY_SYMBOLS from OceanOfCode import DIRS # Method from OceanOfCode import manhattan from", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None)", "print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4))", "board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) 
me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def", "print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me", "me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me =", "in update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is", "= StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1 {}", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me)", "d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N'): print(\"c1 {} f1", "print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in", "import StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode import Board # Global from", "update_order import unittest class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def", "StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me)", "import unittest class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self):", "print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) 
print(len(me.inp)) me.update(read_move,['W']) me", "me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo", "c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if", "= StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1,", "print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {}", "StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1 {} f1", "N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1)", "me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me =", "_read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me)", "in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP)", "1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1)", "me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me =", "test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move", "update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in 
TREASURE_MAP:", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1)", "f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if", "print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self):", "unittest class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp))", "_medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SURFACE", "d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is", "StalkAndLegal from OceanOfCode import StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode import Board", "print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6))", "for c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1", "print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) 
me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me", "me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1,", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp))", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None)", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move", "N|MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None:", "OceanOfCode import Board # Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS", "from OceanOfCode import StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode import Board #", "import sys sys.path.append('../../') # Global variables from test2.test_main import TREASURE_MAP # From OceanOfCode", "= StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def", "<filename>test2/stalking/stalking.py import sys sys.path.append('../../') # Global variables from test2.test_main import TREASURE_MAP # From", "import StalkAndLegal from OceanOfCode import StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode import", "import Board # Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS #", "print(len(me.inp)) def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in", "c1, 
f1, d1 in update_order('SURFACE 1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1)", "test2.test_main import TREASURE_MAP # From OceanOfCode # Class from OceanOfCode import StalkAndLegal from", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp))", "for c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1,", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {}", "sys.path.append('../../') # Global variables from test2.test_main import TREASURE_MAP # From OceanOfCode # Class", "read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me", "def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE", "OceanOfCode import update_order import unittest class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "{}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp))", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = 
StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "for c1, f1, d1 in update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp))", "me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1", "d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP)", "Submarine from OceanOfCode import Board # Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode", "= StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1", "f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me =", "in update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in", "= StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('NA'): print(\"c1 {} f1", "print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me =", "OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS # Method from OceanOfCode import manhattan", "= StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move", "Global variables from test2.test_main import TREASURE_MAP # From OceanOfCode # Class from OceanOfCode", "f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me =", "{} f1 {} d1 
{}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me =", "{}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) if __name__", "print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me", "5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not", "print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "11 1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is not None:", "None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "= StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo", "me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me)", "if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) if __name__ ==", "= StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp))", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me)", "for c1, f1, d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1 {}", "_read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = 
StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me)", "def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2", "if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1,", "Board # Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS # Method", "{}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE 1'):", "= StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE 1'): print(\"c1 {} f1", "= StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S'])", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1", "f1, d1 in update_order('SURFACE 1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me", "= StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {}", "read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp))", "me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo", "me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for", "from OceanOfCode import StalkAndLegal from OceanOfCode import StalkAndTorpedo from OceanOfCode 
import Submarine from", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me =", "me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me =", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None)", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S'])", "= next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me", "in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if", "{} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in", "me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me", "for c1, f1, d1 in update_order('SURFACE 1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO", "Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS # Method from OceanOfCode", "me.update(f1,d1) me = 
StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE 1'): print(\"c1", "update_order('SURFACE 1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp))", "OceanOfCode # Class from OceanOfCode import StalkAndLegal from OceanOfCode import StalkAndTorpedo from OceanOfCode", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me)", "= StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6))", "DIRS # Method from OceanOfCode import manhattan from OceanOfCode import update_order import unittest", "from OceanOfCode import Board # Global from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import", "_set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SILENCE'):", "class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me =", "manhattan from OceanOfCode import update_order import unittest class _stalking(unittest.TestCase): def _set_up(self): me =", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp))", "me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = 
StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me =", "is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in", "StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1", "OceanOfCode import DIRS # Method from OceanOfCode import manhattan from OceanOfCode import update_order", "read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp))", "StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1", "me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE 1'): print(\"c1 {}", "print(len(me.inp)) for c1, f1, d1 in update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self):", "StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode import Board # Global from OceanOfCode", "StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp))", "0|MOVE E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp))", "sys sys.path.append('../../') # Global variables from test2.test_main import TREASURE_MAP # From OceanOfCode #", "is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) if __name__ == '__main__': unittest.main()", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def", "f1, 
d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {} d1", "me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp)) def _small_read_and_update(self): me", "f1, d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me)", "me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface =", "import update_order import unittest class _stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence = StalkAndTorpedo.read_silence2 read_move =", "d1 in update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1", "_read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP)", "= StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print()", "# Global variables from test2.test_main import TREASURE_MAP # From OceanOfCode # Class from", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) 
me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me =", "N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1 is", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N'): print(\"c1", "print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface", "def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me =", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo =", "d1 in update_order('MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me =", "update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me =", "d1 in update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r", "StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me", "= StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1,", "Method from OceanOfCode import manhattan from OceanOfCode import 
update_order import unittest class _stalking(unittest.TestCase):", "= StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me) print(len(me.inp))", "= StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S'])", "def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('NA'):", "for t_r in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1,", "= StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11", "in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me", "d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def", "_stalking(unittest.TestCase): def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None)", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board,", "d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for", "for c1, f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {} d1", "print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE 1'): print(\"c1 {} f1 {} d1", "OceanOfCode import manhattan from OceanOfCode import update_order import unittest class _stalking(unittest.TestCase): def _set_up(self):", "d1 {}\".format(c1,f1,d1)) def _silence(self): for t_r in TREASURE_MAP: print(t_r) me = 
StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me", "me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO 0 0|MOVE", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('NA'): print(\"c1 {} f1 {}", "print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1,", "{}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self):", "print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "{} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me", "print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|SILENCE'): print(\"c1 {} f1 {} d1", "read_move = StalkAndTorpedo.read_move me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp))", "{} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None)", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me =", "me = StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1,", "N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for", "= 
StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move =", "{} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1,", "update_order('MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp))", "_silence(self): for t_r in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1,", "def _read_torpedo(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me =", "0 0|MOVE E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me)", "t_r in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1", "StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_torpedo = StalkAndTorpedo.read_torpedo read_silence", "{}\".format(c1,f1,d1)) def _silence(self): for t_r in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp))", "c1, f1, d1 in update_order('MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1)", "f1, d1 in update_order('MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me", "c1, f1, d1 in update_order('MOVE N|MOVE N'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) 
me.update(read_move,['S']) me = StalkAndTorpedo(me)", "c1, f1, d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1 {} d1", "update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) if f1", "StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {}", "import TREASURE_MAP # From OceanOfCode # Class from OceanOfCode import StalkAndLegal from OceanOfCode", "print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me", "StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me", "d1 in update_order('SURFACE 1'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me =", "from OceanOfCode import manhattan from OceanOfCode import update_order import unittest class _stalking(unittest.TestCase): def", "= StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface for", "def _set_up(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move", "def _silence(self): for t_r in TREASURE_MAP: print(t_r) me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for", "E'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for", 
"me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|MOVE N'):", "def _small_read_and_update(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE", "OceanOfCode import Submarine from OceanOfCode import Board # Global from OceanOfCode import EMPTY_SYMBOLS", "print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO 0 0|MOVE E'): print(\"c1 {} f1", "not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP)", "print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in", "is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None)", "StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('SURFACE 1'): print(\"c1 {} f1 {}", "from OceanOfCode import EMPTY_SYMBOLS from OceanOfCode import DIRS # Method from OceanOfCode import", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_silence,None) me = StalkAndTorpedo(me)", "# Method from OceanOfCode import manhattan from OceanOfCode import update_order import unittest class", "OceanOfCode import StalkAndTorpedo from OceanOfCode import Submarine from OceanOfCode import Board # Global", "f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) if __name__ == '__main__':", "range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me", "import manhattan from OceanOfCode import update_order import unittest class _stalking(unittest.TestCase): def _set_up(self): me", "me = StalkAndTorpedo(None) 
me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('NA'): print(\"c1 {}", "me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(5,6)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me =", "me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) def _na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for", "StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp))", "{}\".format(c1,f1,d1)) me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('TORPEDO 0", "StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W'])", "StalkAndTorpedo.read_torpedo me.update(read_torpedo,(8,4)) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_torpedo,(6,6)) me = StalkAndTorpedo(me) print(len(me.inp)) def test_read_silence(self):", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) def _read_move(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N'])", "me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) read_move = StalkAndTorpedo.read_move me.update(read_move,['N']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['E']) me =", "d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1))", "print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self): me = StalkAndTorpedo(None)", "me = StalkAndTorpedo(me) print(len(me.inp)) def _read_surface(self): me = 
StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) read_surface = StalkAndTorpedo.read_surface", "None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE N|MOVE", "me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['S']) me = StalkAndTorpedo(me) print(len(me.inp)) me.update(read_move,['W']) me = StalkAndTorpedo(me)", "= StalkAndTorpedo(me) print(len(me.inp)) def _medium_read_and_update(self): me = StalkAndTorpedo(None) me.set_up(TREASURE_MAP) print(len(me.inp)) for c1, f1,", "f1, d1 in update_order('NA'): print(\"c1 {} f1 {} d1 {}\".format(c1,f1,d1)) def _silence(self): for", "i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life)", "read_surface = StalkAndTorpedo.read_surface for i in range(0,5): me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) board, stalk", "_na_read(self): me = StalkAndTorpedo(None) me.set_up((1,2,3,4,5,6,7,8,9),TREASURE_MAP) print(len(me.inp)) for c1, f1, d1 in update_order('NA'): print(\"c1", "not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp)) for c1, f1, d1 in update_order('MOVE", "stalk = next(iter(me.inp)) print(len(me.inp)) print(board.life) me.update(read_surface,TREASURE_MAP) me = StalkAndTorpedo(me) print() print(len(me.inp)) def _read_torpedo(self):", "import DIRS # Method from OceanOfCode import manhattan from OceanOfCode import update_order import", "{} d1 {}\".format(c1,f1,d1)) if f1 is not None: me.update(f1,d1) me = StalkAndTorpedo(me) print(len(me.inp))", "c1, f1, d1 in update_order('MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'): print(\"c1 {} f1 {}", "# From OceanOfCode # Class from OceanOfCode import StalkAndLegal from OceanOfCode import StalkAndTorpedo", "from test2.test_main import TREASURE_MAP # From OceanOfCode # Class from OceanOfCode import StalkAndLegal" ]
[ "[ BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M =", "x = image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2):", "preprocess_input(x) # preds = model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671,", "import test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import", "# x = _get_elephant((224, 224)) # x = preprocess_input(x) # preds = model.predict(x)", "raise AssertionError( 'Shapes are different rank: %s vs %s' % (shape1, shape2)) if", "BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, # Bad", "_assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise AssertionError( 'Shapes are different rank: %s", "when loading ImageNet weights (since it is possible). # In this case, default", "= preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) #", "step, # the default is to accept variable-size inputs # even when loading", "def _assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise AssertionError( 'Shapes are different rank:", "test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1,", "] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES", "MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES =", "_IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x = preprocess_input(x) preds = model.predict(x) label =", "np.expand_dims(x, axis=0) def 
_assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise AssertionError( 'Shapes are", "rank: %s vs %s' % (shape1, shape2)) if shape1 != shape2: raise AssertionError('Shapes", "# # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model = app() #", "299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return", "import numpy as np from absl.testing import parameterized from keras.preprocessing import image from", "None: target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size))", "include a Flatten step, # the default is to accept variable-size inputs #", "inputs # even when loading ImageNet weights (since it is possible). # In", "(299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img)", "= np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): #", "possible). # In this case, default to 299x299. 
if target_size[0] is None: target_size", "= app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299, 299)) # x", "app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224,", "preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self,", "self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): # For models that don't include a", "default is to accept variable-size inputs # even when loading ImageNet weights (since", "test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x,", "21843)) # x = _get_elephant((224, 224)) # x = preprocess_input(x) # preds =", "keras.preprocessing import image from keras.utils import data_utils from tensorflow.python.platform import test from ..bit", "# For models that don't include a Flatten step, # the default is", "are different rank: %s vs %s' % (shape1, shape2)) if shape1 != shape2:", "(shape1, shape2)) if shape1 != shape2: raise AssertionError('Shapes differ: %s vs %s' %", "shape2)) if shape1 != shape2: raise AssertionError('Shapes differ: %s vs %s' % (shape1,", "_get_elephant((224, 224)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label,", "# even when loading ImageNet weights (since it is possible). 
# In this", "is None: target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image,", "= data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0)", "BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights #", "@parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299,", "model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) # #", "loading ImageNet weights (since it is possible). # In this case, default to", "app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299, 299)) # x =", "models that don't include a Flatten step, # the default is to accept", "to accept variable-size inputs # even when loading ImageNet weights (since it is", "# In this case, default to 299x299. 
if target_size[0] is None: target_size =", "def test_application_predict_even_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x", "[3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model", "!= len(shape2): raise AssertionError( 'Shapes are different rank: %s vs %s' % (shape1,", "BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from", "img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2):", "BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase):", "import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S = [", "# Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3,", "BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4", "_assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224, 224)) # x = preprocess_input(x) #", "# @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None,", "# _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299, 299)) # x = preprocess_input(x)", "x = _get_elephant((299, 299)) # x = preprocess_input(x) # preds = model.predict(x) #", "AssertionError('Shapes differ: %s vs %s' % (shape1, shape2)) if __name__ == '__main__': test.main()", "BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] 
MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH", "keras.utils import data_utils from tensorflow.python.platform import test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1,", "data_utils from tensorflow.python.platform import test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4", "model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model", "_assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x = preprocess_input(x) preds = model.predict(x)", "self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model = app()", "x = _get_elephant((224, 224)) # x = preprocess_input(x) # preds = model.predict(x) #", "np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def", "= _get_elephant((224, 224)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1)", "don't include a Flatten step, # the default is to accept variable-size inputs", "shape1 != shape2: raise AssertionError('Shapes differ: %s vs %s' % (shape1, shape2)) if", "tensorflow.python.platform import test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit", "(None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x = preprocess_input(x) preds = model.predict(x) label", "BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def", "_assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299, 299)) # x = preprocess_input(x) #", 
"= image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if", "@parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843))", "BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit", "from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3,", "= app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224, 224)) # x", "np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape,", "default to 299x299. if target_size[0] is None: target_size = (299, 299) test_image =", "BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/'", "test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x", "BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S)", "21843)) # x = _get_elephant((299, 299)) # x = preprocess_input(x) # preds =", "TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self,", "x = 
_get_elephant((224, 224)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0],", "# self.assertIn(label, [3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app):", "target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1) !=", "386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model = app() # _assert_shape_equal(model.output_shape,", "vs %s' % (shape1, shape2)) if shape1 != shape2: raise AssertionError('Shapes differ: %s", "app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x = preprocess_input(x) preds =", "# _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224, 224)) # x = preprocess_input(x)", "def test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299))", "= model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) #", "test_application_predict_even_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x =", "image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1)", "def test_application_predict_odd_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x", "from keras.preprocessing import image from keras.utils import data_utils from tensorflow.python.platform import test from", "BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import", "..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1,", "preprocess_input 
MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4", "('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model", "# x = preprocess_input(x) # preds = model.predict(x) # label = np.argmax(preds[0], axis=-1)", "_get_elephant(target_size): # For models that don't include a Flatten step, # the default", "For models that don't include a Flatten step, # the default is to", "@parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224,", "_assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x = preprocess_input(x) preds = model.predict(x)", "preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) #", "image from keras.utils import data_utils from tensorflow.python.platform import test from ..bit import BiT_S_R50x1,", "from tensorflow.python.platform import test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from", "BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1,", "(None, 21843)) # x = _get_elephant((299, 299)) # x = preprocess_input(x) # preds", "app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299,", "even when loading ImageNet weights (since it is possible). 
# In this case,", "axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise AssertionError( 'Shapes are different", "= app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x = preprocess_input(x) preds", "3673, 3674]) def _get_elephant(target_size): # For models that don't include a Flatten step,", "(since it is possible). # In this case, default to 299x299. if target_size[0]", "app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x =", "[348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model = app() #", "model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size):", "= _get_elephant((299, 299)) # x = preprocess_input(x) # preds = model.predict(x) # label", "axis=-1) # self.assertIn(label, [3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self,", "preds = model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674])", "to 299x299. 
if target_size[0] is None: target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg',", "as np from absl.testing import parameterized from keras.preprocessing import image from keras.utils import", "= [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000", "different rank: %s vs %s' % (shape1, shape2)) if shape1 != shape2: raise", "= _get_elephant((224, 224)) # x = preprocess_input(x) # preds = model.predict(x) # label", "!= shape2: raise AssertionError('Shapes differ: %s vs %s' % (shape1, shape2)) if __name__", "model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x = preprocess_input(x)", "= model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def", "386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x =", "variable-size inputs # even when loading ImageNet weights (since it is possible). #", "is to accept variable-size inputs # even when loading ImageNet weights (since it", "ImageNet weights (since it is possible). 
# In this case, default to 299x299.", "the default is to accept variable-size inputs # even when loading ImageNet weights", "shape2): if len(shape1) != len(shape2): raise AssertionError( 'Shapes are different rank: %s vs", "# def test_application_predict_even_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) #", "_IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x = preprocess_input(x) preds = model.predict(x) label =", "ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x", "app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x = preprocess_input(x) preds =", "test_application_predict_odd_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x =", "app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((299, 299)) x =", "= model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def", "# label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): #", "if shape1 != shape2: raise AssertionError('Shapes differ: %s vs %s' % (shape1, shape2))", "axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model =", "AssertionError( 'Shapes are different rank: %s vs %s' % (shape1, shape2)) if shape1", "label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app):", "= np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): # For models", "3674]) def _get_elephant(target_size): # For models that don't include 
a Flatten step, #", "MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ]", "[348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x", "that don't include a Flatten step, # the default is to accept variable-size", "# x = _get_elephant((299, 299)) # x = preprocess_input(x) # preds = model.predict(x)", "data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0) def", "# label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) # # #", "'Shapes are different rank: %s vs %s' % (shape1, shape2)) if shape1 !=", "len(shape1) != len(shape2): raise AssertionError( 'Shapes are different rank: %s vs %s' %", "model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299, 299)) #", "_get_elephant((299, 299)) # x = preprocess_input(x) # preds = model.predict(x) # label =", "if len(shape1) != len(shape2): raise AssertionError( 'Shapes are different rank: %s vs %s'", "parameterized from keras.preprocessing import image from keras.utils import data_utils from tensorflow.python.platform import test", "@parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None, 21843))", "BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg')", "np from absl.testing import parameterized from keras.preprocessing import image from keras.utils import data_utils", "model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224, 224)) #", "weights (since it is possible). 
# In this case, default to 299x299. if", "it is possible). # In this case, default to 299x299. if target_size[0] is", "= image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise", "accept variable-size inputs # even when loading ImageNet weights (since it is possible).", "(None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x = preprocess_input(x) preds = model.predict(x) label", "len(shape2): raise AssertionError( 'Shapes are different rank: %s vs %s' % (shape1, shape2))", "# the default is to accept variable-size inputs # even when loading ImageNet", "import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3,", "model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self,", "# self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): # For models that don't include", "case, default to 299x299. 
if target_size[0] is None: target_size = (299, 299) test_image", "# BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4]", "= preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S)", "= np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app()", "3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model = app()", "shape2: raise AssertionError('Shapes differ: %s vs %s' % (shape1, shape2)) if __name__ ==", "3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model =", "self.assertIn(label, [3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): #", "..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1,", "class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES))", "224)) # x = preprocess_input(x) # preds = model.predict(x) # label = np.argmax(preds[0],", "(None, 21843)) # x = _get_elephant((224, 224)) # x = preprocess_input(x) # preds", "np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model", "Flatten step, # the default is to accept variable-size inputs # even when", "axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app() 
_assert_shape_equal(model.output_shape, (None,", "import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3,", "= app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x = preprocess_input(x) preds", "label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M)", "299)) # x = preprocess_input(x) # preds = model.predict(x) # label = np.argmax(preds[0],", "model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x = preprocess_input(x)", "= preprocess_input(x) # preds = model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label,", "parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x =", "numpy as np from absl.testing import parameterized from keras.preprocessing import image from keras.utils", "app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224, 224)) # x =", "return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise AssertionError( 'Shapes", "= 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape,", "def test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224))", "absl.testing import parameterized from keras.preprocessing import image from keras.utils import data_utils from tensorflow.python.platform", "BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = 
('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase,", "def _get_elephant(target_size): # For models that don't include a Flatten step, # the", "BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, #", "np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): # For models that", "from absl.testing import parameterized from keras.preprocessing import image from keras.utils import data_utils from", "target_size[0] is None: target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img =", "_get_elephant((224, 224)) # x = preprocess_input(x) # preds = model.predict(x) # label =", "if target_size[0] is None: target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img", "preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def", "import image from keras.utils import data_utils from tensorflow.python.platform import test from ..bit import", "Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1,", "= [ BiT_S_R50x1, # Bad weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M", "%s vs %s' % (shape1, shape2)) if shape1 != shape2: raise AssertionError('Shapes differ:", "test_application_predict_odd_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES)) x = _get_elephant((224, 224)) x", "x = _get_elephant((299, 299)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0],", "raise AssertionError('Shapes differ: %s vs %s' % (shape1, shape2)) if __name__ == '__main__':", "In this case, default to 299x299. 
if target_size[0] is None: target_size = (299,", "= model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app):", "# # @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_even_m(self, app): # model = app() # _assert_shape_equal(model.output_shape,", "BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1,", "BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights", "label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model =", "# model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((224, 224))", "299x299. if target_size[0] is None: target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH)", "TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x = image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1,", "= np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) # # # @parameterized.parameters(*MODEL_LIST_M) #", "this case, default to 299x299. if target_size[0] is None: target_size = (299, 299)", "BiT_S_R152x4 from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input", "[BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH = ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class", "import data_utils from tensorflow.python.platform import test from ..bit import BiT_S_R50x1, BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3,", "is possible). # In this case, default to 299x299. 
if target_size[0] is None:", "image.img_to_array(img) return np.expand_dims(x, axis=0) def _assert_shape_equal(shape1, shape2): if len(shape1) != len(shape2): raise AssertionError(", "..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S =", "target_size = (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x", "preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386]) # @parameterized.parameters(*MODEL_LIST_M)", "label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): # For", "self.assertIn(label, [348, 386]) @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_even_s(self, app): model = app() _assert_shape_equal(model.output_shape, (None, _IMAGENET_CLASSES))", "_IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app()", "224)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348,", "BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4] TEST_IMAGE_PATH =", "= (299, 299) test_image = data_utils.get_file('elephant.jpg', TEST_IMAGE_PATH) img = image.load_img(test_image, target_size=tuple(target_size)) x =", "x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348, 386])", "from ..bit import preprocess_input MODEL_LIST_S = [ BiT_S_R50x1, # Bad weights # BiT_S_R50x3,", "299)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label, [348,", "# def test_application_predict_odd_m(self, app): # model = app() # 
_assert_shape_equal(model.output_shape, (None, 21843)) #", "# preds = model.predict(x) # label = np.argmax(preds[0], axis=-1) # self.assertIn(label, [3671, 3673,", "weights # BiT_S_R50x3, BiT_S_R101x1, BiT_S_R101x3, BiT_S_R152x4 ] MODEL_LIST_M = [BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3,", "axis=-1) # self.assertIn(label, [3671, 3673, 3674]) def _get_elephant(target_size): # For models that don't", "% (shape1, shape2)) if shape1 != shape2: raise AssertionError('Shapes differ: %s vs %s'", "from keras.utils import data_utils from tensorflow.python.platform import test from ..bit import BiT_S_R50x1, BiT_S_R50x3,", "# model = app() # _assert_shape_equal(model.output_shape, (None, 21843)) # x = _get_elephant((299, 299))", "[3671, 3673, 3674]) def _get_elephant(target_size): # For models that don't include a Flatten", "import parameterized from keras.preprocessing import image from keras.utils import data_utils from tensorflow.python.platform import", "%s' % (shape1, shape2)) if shape1 != shape2: raise AssertionError('Shapes differ: %s vs", "= ('https://storage.googleapis.com/tensorflow/' 'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app):", "x = preprocess_input(x) # preds = model.predict(x) # label = np.argmax(preds[0], axis=-1) #", "a Flatten step, # the default is to accept variable-size inputs # even", "from ..bit import BiT_M_R50x1, BiT_M_R50x3, BiT_M_R101x1, BiT_M_R101x3, BiT_M_R152x4 from ..bit import preprocess_input MODEL_LIST_S", "# @parameterized.parameters(*MODEL_LIST_M) # def test_application_predict_odd_m(self, app): # model = app() # _assert_shape_equal(model.output_shape, (None,", "1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model = app() 
_assert_shape_equal(model.output_shape, (None,", "= _get_elephant((299, 299)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1)", "_get_elephant((299, 299)) x = preprocess_input(x) preds = model.predict(x) label = np.argmax(preds[0], axis=-1) self.assertIn(label,", "'keras-applications/tests/elephant.jpg') _IMAGENET_CLASSES = 1000 class ApplicationsLoadWeightTest(test.TestCase, parameterized.TestCase): @parameterized.parameters(*MODEL_LIST_S) def test_application_predict_odd_s(self, app): model =" ]
[ "validate(folder, gt_folder): patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p in", "num_threads=3) if __name__ == \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for", "subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p in patient_ids: file = join(folder,", "'esembly fullres and lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder,", "json_task=task, num_threads=3) if __name__ == \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores", "from batchgenerators.utilities.file_and_folder_operations import * from nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): patient_ids =", "metrics scores for validation folder containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with", "json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\": import", "and lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name", "gt_folder): patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p in patient_ids:", "GT nifti files.\", required=False) args = parser.parse_args() folder = args.folder gt_folder = args.gtfolder", "use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\":", "parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str,", "to evaluate\", required=True) 
parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT nifti files.\", required=False) args", "p in patient_ids: file = join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file])", "import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing nifti files\")", "pred_gt_tuples = [] for p in patient_ids: file = join(folder, p) gt_file =", "job_name = 'esembly fullres and lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)),", "join=False) pred_gt_tuples = [] for p in patient_ids: file = join(folder, p) gt_file", "= argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing nifti files\") parser.add_argument('-f', '--folder', type=str,", "argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder", "p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres' num_classes", "gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly fullres", "nifti files.\", required=False) args = parser.parse_args() folder = args.folder gt_folder = args.gtfolder validate(folder,", "help=\"Folder with nifti files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT", "join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres'", "folder containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files to evaluate\",", "= aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, 
num_threads=3) if", "containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files to evaluate\", required=True)", "folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes", "= join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name", "fullres and lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"),", "evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT nifti files.\", required=False) args =", "if __name__ == \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation", "scores for validation folder containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti", "\"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing nifti", "+ folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\": import argparse parser =", "with GT nifti files.\", required=False) args = parser.parse_args() folder = args.folder gt_folder =", "validation folder containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files to", "join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name =", "in patient_ids: file = join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task", "[] for p in patient_ids: file = join(folder, p) gt_file = join(gt_folder, p)", "* from nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): patient_ids = 
subfiles(folder, suffix=\".nii.gz\", join=False)", "for p in patient_ids: file = join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file,", "p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly", "aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__", "parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT nifti files.\", required=False) args = parser.parse_args() folder", "batchgenerators.utilities.file_and_folder_operations import * from nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): patient_ids = subfiles(folder,", "gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres' num_classes = 3", "nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files to evaluate\", required=True) parser.add_argument('-gtf',", "type=str, help=\"Folder with GT nifti files.\", required=False) args = parser.parse_args() folder = args.folder", "files.\", required=False) args = parser.parse_args() folder = args.folder gt_folder = args.gtfolder validate(folder, gt_folder)", "= folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres' num_classes = 3 _ =", "nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples =", "json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics", "== \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing", "nifti files to evaluate\", required=True) parser.add_argument('-gtf', 
'--gtfolder', type=str, help=\"Folder with GT nifti files.\",", "files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder',", "def validate(folder, gt_folder): patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p", "files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT nifti files.\", required=False)", "json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\": import argparse parser", "= [] for p in patient_ids: file = join(folder, p) gt_file = join(gt_folder,", "patient_ids: file = join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task =", "task = folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres' num_classes = 3 _", "argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing nifti files\") parser.add_argument('-f',", "suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p in patient_ids: file = join(folder, p)", "__name__ == \"__main__\": import argparse parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation folder", "\"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ == \"__main__\": import argparse", "with nifti files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT nifti", "import * from nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): patient_ids = subfiles(folder, suffix=\".nii.gz\",", "= 'esembly fullres and lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None,", "import aggregate_scores def validate(folder, gt_folder): patient_ids = 
subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = []", "labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3) if __name__ ==", "= 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\",", "3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task,", "folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples,", "parser = argparse.ArgumentParser(description=\"Computes metrics scores for validation folder containing nifti files\") parser.add_argument('-f', '--folder',", "lowres' num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name +", "pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly fullres and lowres' num_classes =", "'--gtfolder', type=str, help=\"Folder with GT nifti files.\", required=False) args = parser.parse_args() folder =", "required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with GT nifti files.\", required=False) args = parser.parse_args()", "<reponame>PawelPeczek/Abdomen-CT-Image-Segmentation from batchgenerators.utilities.file_and_folder_operations import * from nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): patient_ids", "= subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p in patient_ids: file =", "from nnunet.evaluation.evaluator import aggregate_scores def validate(folder, gt_folder): 
patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples", "= join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4] job_name = 'esembly fullres and", "'--folder', type=str, help=\"Folder with nifti files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder", "for validation folder containing nifti files\") parser.add_argument('-f', '--folder', type=str, help=\"Folder with nifti files", "_ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2], json_author=\"Bety\", json_task=task, num_threads=3)", "type=str, help=\"Folder with nifti files to evaluate\", required=True) parser.add_argument('-gtf', '--gtfolder', type=str, help=\"Folder with", "help=\"Folder with GT nifti files.\", required=False) args = parser.parse_args() folder = args.folder gt_folder", "file = join(folder, p) gt_file = join(gt_folder, p) pred_gt_tuples.append([file, gt_file]) task = folder.split(\"/\")[-4]", "num_classes = 3 _ = aggregate_scores(pred_gt_tuples, labels=list(range(num_classes)), use_label=None, json_output_file=join(folder, \"summary.json\"), json_name=job_name + folder.split(\"/\")[-2],", "aggregate_scores def validate(folder, gt_folder): patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for", "patient_ids = subfiles(folder, suffix=\".nii.gz\", join=False) pred_gt_tuples = [] for p in patient_ids: file" ]
[ "step_size = 1 while True: pwm.set_pwm(0, 0, pulse) if pulse < servo_max and", "pulse = servo_min increasing = True step_size = 1 while True: pwm.set_pwm(0, 0,", "+= step_size increasing = True elif pulse > servo_min: pulse -= step_size increasing", "step_size increasing = True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0,", "servo_min increasing = True step_size = 1 while True: pwm.set_pwm(0, 0, pulse) if", "= 450 pulse = servo_min increasing = True step_size = 1 while True:", "from Adafruit_PCA9685 import PCA9685 import time pwm = PCA9685() servo_min = 250 servo_max", "-= step_size increasing = False else: pulse += step_size increasing = True time.sleep(0.01)", "increasing = True step_size = 1 while True: pwm.set_pwm(0, 0, pulse) if pulse", "True elif pulse > servo_min: pulse -= step_size increasing = False else: pulse", "True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0, 0, servo_max) time.sleep(0.5)", "= servo_min increasing = True step_size = 1 while True: pwm.set_pwm(0, 0, pulse)", "servo_min: pulse -= step_size increasing = False else: pulse += step_size increasing =", "= True step_size = 1 while True: pwm.set_pwm(0, 0, pulse) if pulse <", "pulse) if pulse < servo_max and increasing: pulse += step_size increasing = True", "+= step_size increasing = True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5)", "print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0, 0, servo_max) time.sleep(0.5) pwm.set_pwm(0, 0,", "PCA9685 import time pwm = PCA9685() servo_min = 250 servo_max = 450 pulse", "servo_min = 250 servo_max = 450 pulse = servo_min increasing = True step_size", "pwm = PCA9685() servo_min = 250 servo_max = 450 pulse = servo_min increasing", "True: pwm.set_pwm(0, 0, pulse) if pulse < servo_max and increasing: pulse += step_size", "0, pulse) if pulse < servo_max and increasing: 
pulse += step_size increasing =", "= PCA9685() servo_min = 250 servo_max = 450 pulse = servo_min increasing =", "pulse -= step_size increasing = False else: pulse += step_size increasing = True", "while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0, 0, servo_max) time.sleep(0.5) pwm.set_pwm(0, 0, 0)", "PCA9685() servo_min = 250 servo_max = 450 pulse = servo_min increasing = True", "servo_max = 450 pulse = servo_min increasing = True step_size = 1 while", "#!/usr/bin/env python3 from Adafruit_PCA9685 import PCA9685 import time pwm = PCA9685() servo_min =", "pulse < servo_max and increasing: pulse += step_size increasing = True elif pulse", "and increasing: pulse += step_size increasing = True elif pulse > servo_min: pulse", "step_size increasing = True elif pulse > servo_min: pulse -= step_size increasing =", "increasing = True elif pulse > servo_min: pulse -= step_size increasing = False", "False else: pulse += step_size increasing = True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0,", "= True elif pulse > servo_min: pulse -= step_size increasing = False else:", "import PCA9685 import time pwm = PCA9685() servo_min = 250 servo_max = 450", "import time pwm = PCA9685() servo_min = 250 servo_max = 450 pulse =", "elif pulse > servo_min: pulse -= step_size increasing = False else: pulse +=", "step_size increasing = False else: pulse += step_size increasing = True time.sleep(0.01) print(pulse)", "True step_size = 1 while True: pwm.set_pwm(0, 0, pulse) if pulse < servo_max", "pulse += step_size increasing = True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min)", "= 250 servo_max = 450 pulse = servo_min increasing = True step_size =", "450 pulse = servo_min increasing = True step_size = 1 while True: pwm.set_pwm(0,", "increasing: pulse += step_size increasing = True elif pulse > servo_min: pulse -=", "increasing = False else: pulse += step_size increasing = True time.sleep(0.01) print(pulse) while", "pulse += step_size 
increasing = True elif pulse > servo_min: pulse -= step_size", "250 servo_max = 450 pulse = servo_min increasing = True step_size = 1", "time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0, 0, servo_max) time.sleep(0.5) pwm.set_pwm(0,", "= 1 while True: pwm.set_pwm(0, 0, pulse) if pulse < servo_max and increasing:", "pulse > servo_min: pulse -= step_size increasing = False else: pulse += step_size", "time pwm = PCA9685() servo_min = 250 servo_max = 450 pulse = servo_min", "if pulse < servo_max and increasing: pulse += step_size increasing = True elif", "Adafruit_PCA9685 import PCA9685 import time pwm = PCA9685() servo_min = 250 servo_max =", "< servo_max and increasing: pulse += step_size increasing = True elif pulse >", "1 while True: pwm.set_pwm(0, 0, pulse) if pulse < servo_max and increasing: pulse", "while True: pwm.set_pwm(0, 0, pulse) if pulse < servo_max and increasing: pulse +=", "pwm.set_pwm(0, 0, pulse) if pulse < servo_max and increasing: pulse += step_size increasing", "= True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0, 0, servo_max)", "else: pulse += step_size increasing = True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0,", "servo_max and increasing: pulse += step_size increasing = True elif pulse > servo_min:", "increasing = True time.sleep(0.01) print(pulse) while False: pwm.set_pwm(0, 0, servo_min) time.sleep(0.5) pwm.set_pwm(0, 0,", "> servo_min: pulse -= step_size increasing = False else: pulse += step_size increasing", "python3 from Adafruit_PCA9685 import PCA9685 import time pwm = PCA9685() servo_min = 250", "= False else: pulse += step_size increasing = True time.sleep(0.01) print(pulse) while False:" ]
[ "# noqa from .drones import * # noqa from .medications import * #", "from .deliveries import * # noqa from .drones import * # noqa from", "* # noqa from .drones import * # noqa from .medications import *", "import * # noqa from .drones import * # noqa from .medications import", ".deliveries import * # noqa from .drones import * # noqa from .medications", "noqa from .drones import * # noqa from .medications import * # noqa" ]
[ "\"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080))", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "publish, distribute, sublicense, and/or sell # copies of the Software, and to permit", "# https://github.com/initbar/sipd import re import unittest from src.sockets import * from src.parser import", "DEALINGS IN THE # SOFTWARE. # # https://github.com/initbar/sipd import re import unittest from", "software and associated documentation files (the \"Software\"), to deal # in the Software", "copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "OTHER DEALINGS IN THE # SOFTWARE. 
# # https://github.com/initbar/sipd import re import unittest", "\"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\"))", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self):", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\",", "def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def", "portions of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "do so, subject to the following conditions: # # The above copyright notice", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1] with safe_allocate_udp_client()", "and to permit persons to whom the Software is # furnished to do", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "the Software without restriction, including without limitation the rights # to use, copy,", "IN THE # SOFTWARE. # # https://github.com/initbar/sipd import re import unittest from src.sockets", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "without restriction, including without limitation the rights # to use, copy, modify, merge,", "the following conditions: # # The above copyright notice and this permission notice", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "https://github.com/initbar/sipd import re import unittest from src.sockets import * from src.parser import *", "import * class TestSockets(unittest.TestCase): # # ip address # def test_sockets_get_server_address(self): ip_address =", "test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self):", "person obtaining a copy # of this software and associated documentation files (the", "LIMITED TO THE WARRANTIES OF 
MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "# furnished to do so, subject to the following conditions: # # The", "the Software, and to permit persons to whom the Software is # furnished", "permit persons to whom the Software is # furnished to do so, subject", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "import re import unittest from src.sockets import * from src.parser import * class", "test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp", "Permission is hereby granted, free of charge, to any person obtaining a copy", "\"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080))", "OR OTHER DEALINGS IN THE # SOFTWARE. # # https://github.com/initbar/sipd import re import", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket()", "in the Software without restriction, including without limitation the rights # to use,", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "Software without restriction, including without limitation the rights # to use, copy, modify,", "IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def", "# # udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080))", "def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to", "# udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): 
self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def", "unittest from src.sockets import * from src.parser import * class TestSockets(unittest.TestCase): # #", "self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self):", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "!= \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\",", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "copies of the Software, and to permit persons to whom the Software is", "# The above copyright notice and this permission notice shall be included in", "src.parser import * class TestSockets(unittest.TestCase): # # ip address # def test_sockets_get_server_address(self): ip_address", "included in all # copies or substantial portions of the Software. # #", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# #", "def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # #", "# of this software and associated documentation files (the \"Software\"), to deal #", "to do so, subject to the following conditions: # # The above copyright", "Copyright (c) 2018 <NAME> # # Permission is hereby granted, free of charge,", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "is hereby granted, free of charge, to any person obtaining a copy #", "above copyright notice and this permission notice shall be included in all #", "persons to whom the Software is # furnished to do so, subject to", "sell # copies of the Software, and to permit persons to whom the", "<NAME> # # Permission is hereby granted, free of charge, to any person", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "conditions: # # The above copyright notice and this permission notice shall be", "substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "documentation files (the \"Software\"), to deal # in the Software without restriction, including", "to permit persons to whom the Software is # furnished to do so,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS", "License # # Copyright (c) 2018 <NAME> # # Permission is hereby granted,", "self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\",", "notice shall be included in all # copies or substantial portions of the", "from src.parser import * class TestSockets(unittest.TestCase): # # ip address # def test_sockets_get_server_address(self):", "restriction, including without limitation the rights # to use, copy, modify, merge, publish,", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "of charge, to any person obtaining a copy # of this software and", "whom the Software is # furnished to do so, subject to the following", "SOFTWARE. # # https://github.com/initbar/sipd import re import unittest from src.sockets import * from", "# def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) #", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "# # Permission is hereby granted, free of charge, to any person obtaining", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL", "free of charge, to any person obtaining a copy # of this software", "self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port", "shall be included in all # copies or substantial portions of the Software.", "ip address # def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no IP found\")", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software,", "The above copyright notice and this permission notice shall be included in all", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# # https://github.com/initbar/sipd", "and/or sell # copies of the Software, and to permit persons to whom", "so, subject to the following conditions: # # The above copyright notice and", "this permission notice shall be included in all # copies or substantial portions", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "# ip address # def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no IP", "test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1] with", "def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1]", "def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense,", "# copies or substantial portions of the Software. # # THE SOFTWARE IS", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# # https://github.com/initbar/sipd import", "# in the Software without restriction, including without limitation the rights # to", "is # furnished to do so, subject to the following conditions: # #", "8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1] with safe_allocate_udp_client() as", "files (the \"Software\"), to deal # in the Software without restriction, including without", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "MIT License # # Copyright (c) 2018 <NAME> # # Permission is hereby", "copy # of this software and associated documentation files (the \"Software\"), to deal", "* class TestSockets(unittest.TestCase): # # ip address # def test_sockets_get_server_address(self): ip_address = get_server_address()", "(c) 2018 <NAME> # # Permission is hereby granted, free of charge, to", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "to the following conditions: # # The above copyright notice and this permission", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "to deal # in the Software without restriction, including without limitation the rights", "8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port =", "the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "to any person obtaining a copy # of this software and associated documentation", "class TestSockets(unittest.TestCase): # # ip address # def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address", "found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def 
test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self):", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "following conditions: # # The above copyright notice and this permission notice shall", "of the Software, and to permit persons to whom the Software is #", "in all # copies or substantial portions of the Software. # # THE", "the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. #", "TestSockets(unittest.TestCase): # # ip address # def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address !=", "and associated documentation files (the \"Software\"), to deal # in the Software without", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "re import unittest from src.sockets import * from src.parser import * class TestSockets(unittest.TestCase):", "def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def", "any person obtaining a copy # of this software and associated documentation files", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN", "# # The above copyright notice and this permission notice shall be included", "self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\",", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "2018 <NAME> # # Permission is hereby granted, free of charge, to any", "self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with", "\"Software\"), to deal # in the Software without restriction, including without limitation the", "8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080))", "test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self):", "OR 
OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self):", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def", "sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\",", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "# Copyright (c) 2018 <NAME> # # Permission is hereby granted, free of", "a copy # of this software and associated documentation files (the \"Software\"), to", "deal # in the Software without restriction, including without limitation the rights #", "sublicense, and/or sell # copies of the Software, and to permit persons to", "Software is # furnished to do so, subject to the following conditions: #", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "import * from src.parser import * class TestSockets(unittest.TestCase): # # ip address #", "# # https://github.com/initbar/sipd import re import unittest from src.sockets import * from src.parser", "Software, and to permit persons to whom the Software is # furnished to", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "* from src.parser import * class TestSockets(unittest.TestCase): # # ip address # def", "def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1] with safe_allocate_udp_client() as udp_client:", "ip_address = get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "including without limitation the rights # to use, copy, modify, merge, publish, distribute,", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "# SOFTWARE. # # https://github.com/initbar/sipd import re import unittest from src.sockets import *", "THE # SOFTWARE. # # https://github.com/initbar/sipd import re import unittest from src.sockets import", "all # copies or substantial portions of the Software. 
# # THE SOFTWARE", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "(the \"Software\"), to deal # in the Software without restriction, including without limitation", "this software and associated documentation files (the \"Software\"), to deal # in the", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "copyright notice and this permission notice shall be included in all # copies", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "charge, to any person obtaining a copy # of this software and associated", "associated documentation files (the \"Software\"), to deal # in the Software without restriction,", "# # Copyright (c) 2018 <NAME> # # Permission is hereby granted, free", "self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets # def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\",", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "USE OR OTHER DEALINGS IN THE # SOFTWARE. # # https://github.com/initbar/sipd import re", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "hereby granted, free of charge, to any person obtaining a copy # of", "of this software and associated documentation files (the \"Software\"), to deal # in", "of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "= get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address)) # # udp sockets #", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "granted, free of charge, to any person obtaining a copy # of this", "test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\")) def test_sockets_unsafe_allocate_udp_socket_hostname_1(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.1\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self):", "# copies of the Software, and to permit persons to whom the Software", "# def test_sockets_unsafe_allocate_udp_socket_empty_both(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", \"\")) def test_sockets_unsafe_allocate_udp_socket_empty_host(self): self.assertFalse(unsafe_allocate_udp_socket(\"\", 8080)) def test_sockets_unsafe_allocate_udp_socket_empty_port(self): self.assertFalse(unsafe_allocate_udp_socket(\"127.0.0.1\", \"\"))", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "src.sockets import * from src.parser import * class TestSockets(unittest.TestCase): # # ip address", "test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket:", "import unittest from src.sockets import * from src.parser import * class 
TestSockets(unittest.TestCase): #", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "address # def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no IP found\") self.assertTrue(REGX_IPV4.match(ip_address))", "to whom the Software is # furnished to do so, subject to the", "limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or", "with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1] with safe_allocate_udp_client() as udp_client: udp_client.connect((\"127.0.0.1\", socket_port))", "# # ip address # def test_sockets_get_server_address(self): ip_address = get_server_address() self.assertTrue(ip_address != \"no", "permission notice shall be included in all # copies or substantial portions of", "furnished to do so, subject to the following conditions: # # The above", "and this permission notice shall be included in all # copies or substantial", "modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "be included in all # copies or substantial portions of the Software. 
#", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as udp_socket: socket_port = udp_socket.getsockname()[1] with safe_allocate_udp_client() as udp_client: udp_client.connect((\"127.0.0.1\",", "# Permission is hereby granted, free of charge, to any person obtaining a", "def test_sockets_unsafe_allocate_udp_socket_hostname_2(self): self.assertFalse(unsafe_allocate_udp_socket(\"localhose\", 8080)) def test_sockets_unsafe_allocate_udp_socket_hostname_3(self): self.assertFalse(unsafe_allocate_udp_socket(\"0.0.0.0.0\", 8080)) def test_sockets_unsafe_allocate_udp_socket(self): with safe_allocate_random_udp_socket() as", "# MIT License # # Copyright (c) 2018 <NAME> # # Permission is", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "the Software is # furnished to do so, subject to the following conditions:", "subject to the following conditions: # # The above copyright notice and this", "notice and this permission notice shall be included in all # copies or", "from src.sockets import * from src.parser import * class TestSockets(unittest.TestCase): # # ip" ]
[ "Keras Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255)", "Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step 2 - Max Pooling", "3 - Flatten classifier.add(Flatten()) # Step 4 - Fully Connected Layer classifier.add(Dense(128, activation='relu'))", "'data/validation', # target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) #", "keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL", "classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size", "4 - Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the", "ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1,", "metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen", "# Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution", "classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) #", "from keras.models import Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout #", "# Step 3 - Flatten classifier.add(Flatten()) # 
Step 4 - Fully Connected Layer", "#validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator(", "# Initialising the CNN classifier = Sequential() # Step 1 - Convolution classifier.add(Conv2D(16,", "classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen =", "- Flatten classifier.add(Flatten()) # Step 4 - Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3))", "class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), # batch_size=32, # class_mode='binary')", "keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising the CNN classifier =", "= ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical')", "MaxPooling2D, Flatten, Dense, Dropout # Initialising the CNN classifier = Sequential() # Step", "(2,2))) # Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256,", "classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 -", "Importing all the libraries from keras.models import Sequential from keras.layers import Conv2D, MaxPooling2D,", "Recognation For Static and Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries from keras.models", "For Static and Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries from keras.models import", "Initialising the CNN classifier = Sequential() # Step 1 - Convolution 
classifier.add(Conv2D(16, kernel_size=3,", "classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image", "= (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 - Flatten", "Flatten classifier.add(Flatten()) # Step 4 - Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36,", "and Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries from keras.models import Sequential from", "import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures", "- Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step 2 - Max", "Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution layers", "Flatten, Dense, Dropout # Initialising the CNN classifier = Sequential() # Step 1", "Dense, Dropout # Initialising the CNN classifier = Sequential() # Step 1 -", "# 'data/validation', # target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5)", "= Sequential() # Step 1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1)))", "= (2,2))) # Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2)))", "train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110,", "the CNN classifier = Sequential() # Step 1 - Convolution classifier.add(Conv2D(16, 
kernel_size=3, activation='relu',", "# Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image", "2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution layers classifier.add(Conv2D(16,", "Step 4 - Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile", "Sequential() # Step 1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) #", "#classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 - Flatten classifier.add(Flatten()) # Step 4 -", "activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras", "ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator", "input_shape=(110, 110, 1))) # Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) #", "classifier.add(Dense(36, activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing", "Static and Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries from keras.models import Sequential", "Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size", "Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator =", 
"batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save the Model import joblib", "Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop',", "Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen", "DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150,", "#classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 - Flatten classifier.add(Flatten()) #", "color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), # batch_size=32, #", "keras.models import Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising", "ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet',", "Language Recognation For Static and Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries from", "libraries from keras.models import Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout", "1))) # Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra", "from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising the CNN classifier", "extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) 
#classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size", "Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image import", "= (2,2))) # Step 3 - Flatten classifier.add(Flatten()) # Step 4 - Fully", "Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising the CNN classifier = Sequential() #", "import Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising the", "= train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory(", "Step 3 - Flatten classifier.add(Flatten()) # Step 4 - Fully Connected Layer classifier.add(Dense(128,", "(2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 - Flatten classifier.add(Flatten())", "Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu'))", "batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), # batch_size=32,", "(2,2))) # Step 3 - Flatten classifier.add(Flatten()) # Step 4 - Fully Connected", "Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising the CNN", "# Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2,", "the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image 
import ImageDataGenerator", "train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( #", "Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu'))", "layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2)))", "classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size =", "# Step 4 - Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) #", "activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 - Flatten classifier.add(Flatten()) # Step 4", "- Fully Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the Model", "kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3 - Flatten classifier.add(Flatten()) # Step", "# batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save the Model import", "activation='relu', input_shape=(110, 110, 1))) # Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2)))", "activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step 3", "Connected Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, 
activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy',", "classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) #", "Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator", "'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation',", "train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator =", "loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255)", "Dropout # Initialising the CNN classifier = Sequential() # Step 1 - Convolution", "# Step 1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step", "Gestures/ISL-CNN-Model.py # Importing all the libraries from keras.models import Sequential from keras.layers import", "activation='softmax')) # Compile the Model classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Keras Image Preprocessing from", "#test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale',", "Layer classifier.add(Dense(128, activation='relu')) classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax')) # Compile the Model 
classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])", "Step 1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step 2", "- Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding extra convolution layers classifier.add(Conv2D(16, kernel_size=3,", "classifier = Sequential() # Step 1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110,", "Gestures DataSet', target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', #", "110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), #", "150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save the Model", "class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save the Model import joblib joblib.dump(classifier, 'ISL-CNN-Model')", "# Keras Image Preprocessing from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen =", "110, 1))) # Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size = (2,2))) # Adding", "# class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save the Model import joblib joblib.dump(classifier,", "kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size = (2,2))) # Step", "all the libraries from keras.models import Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten,", "# Importing all the libraries from keras.models import Sequential from keras.layers import Conv2D,", 
"Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries from keras.models import Sequential from keras.layers", "from keras.preprocessing.image import ImageDataGenerator train_datagen = ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory(", "<filename>Indian Sign Language Recognation For Static and Dynamic Gestures/ISL-CNN-Model.py # Importing all the", "target_size=(110, 110), batch_size=1, color_mode='grayscale', class_mode='categorical') #validation_generator = test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150),", "convolution layers classifier.add(Conv2D(16, kernel_size=3, activation='relu')) classifier.add(MaxPooling2D(pool_size = (2,2))) #classifier.add(Conv2D(256, kernel_size=2, activation='relu')) #classifier.add(MaxPooling2D(pool_size =", "target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save the", "CNN classifier = Sequential() # Step 1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110,", "1 - Convolution classifier.add(Conv2D(16, kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step 2 -", "test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920,", "Sign Language Recognation For Static and Dynamic Gestures/ISL-CNN-Model.py # Importing all the libraries", "= ImageDataGenerator(rescale=1./255) #test_datagen = ImageDataGenerator(rescale=1./255) train_generator = train_datagen.flow_from_directory( 'ISL Gestures DataSet', target_size=(110, 110),", "the libraries from keras.models import Sequential from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense,", "classifier.add(Flatten()) # Step 4 - Fully Connected Layer classifier.add(Dense(128, activation='relu')) 
classifier.add(Dropout(0.3)) classifier.add(Dense(36, activation='softmax'))", "import Conv2D, MaxPooling2D, Flatten, Dense, Dropout # Initialising the CNN classifier = Sequential()", "# target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator, steps_per_epoch=7920, epochs=5) # Save", "= test_datagen.flow_from_directory( # 'data/validation', # target_size=(150, 150), # batch_size=32, # class_mode='binary') classifier.fit_generator( train_generator,", "kernel_size=3, activation='relu', input_shape=(110, 110, 1))) # Step 2 - Max Pooling classifier.add(MaxPooling2D(pool_size =" ]
[ "<gh_stars>1-10 # THE OLD MONK for _ in range(int(input())): N = int(input()) A", "= [int(a) for a in input().split()] res = 0 mx = 0 for", "in input().split()] B = [int(a) for a in input().split()] res = 0 mx", "[int(a) for a in input().split()] B = [int(a) for a in input().split()] res", "int(input()) A = [int(a) for a in input().split()] B = [int(a) for a", "a in input().split()] res = 0 mx = 0 for i in range(N):", "range(i,N,1): if A[i]>B[j]: break res = j-i if res>mx: mx = res print(mx)", "THE OLD MONK for _ in range(int(input())): N = int(input()) A = [int(a)", "res = 0 mx = 0 for i in range(N): for j in", "0 mx = 0 for i in range(N): for j in range(i,N,1): if", "= 0 for i in range(N): for j in range(i,N,1): if A[i]>B[j]: break", "N = int(input()) A = [int(a) for a in input().split()] B = [int(a)", "B = [int(a) for a in input().split()] res = 0 mx = 0", "= 0 mx = 0 for i in range(N): for j in range(i,N,1):", "= int(input()) A = [int(a) for a in input().split()] B = [int(a) for", "0 for i in range(N): for j in range(i,N,1): if A[i]>B[j]: break res", "i in range(N): for j in range(i,N,1): if A[i]>B[j]: break res = j-i", "_ in range(int(input())): N = int(input()) A = [int(a) for a in input().split()]", "input().split()] res = 0 mx = 0 for i in range(N): for j", "in range(N): for j in range(i,N,1): if A[i]>B[j]: break res = j-i if", "range(N): for j in range(i,N,1): if A[i]>B[j]: break res = j-i if res>mx:", "in input().split()] res = 0 mx = 0 for i in range(N): for", "for a in input().split()] res = 0 mx = 0 for i in", "MONK for _ in range(int(input())): N = int(input()) A = [int(a) for a", "range(int(input())): N = int(input()) A = [int(a) for a in input().split()] B =", "# THE OLD MONK for _ in range(int(input())): N = int(input()) A =", "in range(int(input())): N = int(input()) A = [int(a) for a in input().split()] B", "for j in range(i,N,1): if A[i]>B[j]: break res = j-i if res>mx: mx", "input().split()] B = [int(a) for a in input().split()] res = 0 mx 
=", "= [int(a) for a in input().split()] B = [int(a) for a in input().split()]", "[int(a) for a in input().split()] res = 0 mx = 0 for i", "in range(i,N,1): if A[i]>B[j]: break res = j-i if res>mx: mx = res", "a in input().split()] B = [int(a) for a in input().split()] res = 0", "for i in range(N): for j in range(i,N,1): if A[i]>B[j]: break res =", "OLD MONK for _ in range(int(input())): N = int(input()) A = [int(a) for", "A = [int(a) for a in input().split()] B = [int(a) for a in", "for a in input().split()] B = [int(a) for a in input().split()] res =", "mx = 0 for i in range(N): for j in range(i,N,1): if A[i]>B[j]:", "j in range(i,N,1): if A[i]>B[j]: break res = j-i if res>mx: mx =", "for _ in range(int(input())): N = int(input()) A = [int(a) for a in" ]
[ "body assert \"message\" not in body assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\",", "Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without", "a higher total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a", "403 body = res.json() assert \"ok\" not in body assert \"message\" in body", "/backend/validate without IAM authorization \"\"\" card_number = \"1234567890123456\" total = 3000 # Create", "payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert res.status_code == 400 body =", "}) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM authorization \"\"\" card_number =", "payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a higher total", "total-1000 } ) assert res.status_code == 200 body = res.json() assert \"ok\" in", "\"\"\" Test /backend/validate with a higher total \"\"\" card_number = \"1234567890123456\" total =", "def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth):", "card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res =", "import iam_auth # pylint: disable=import-error from helpers import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\")", "assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def 
test_backend_validate_no_payment_token(payment_3p_api_url,", "body assert \"message\" not in body assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\",", "# Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test", "assert \"message\" in body assert \"paymentToken\" in body[\"message\"] # Cleanup cancelPayment requests.post(payment_3p_api_url+\"/cancelPayment\", json={", "with a non-existent token \"\"\" payment_token = str(<KEY>()) total = 3000 # Validate", "pytest import requests from fixtures import iam_auth # pylint: disable=import-error from helpers import", "= requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 } ) assert res.status_code", "in body assert \"message\" not in body assert body[\"ok\"] == False # Cleanup", "\"total\": total } ) assert res.status_code == 403 body = res.json() assert \"ok\"", "iam_auth): \"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\" total = 3000 # Create", "payment_token } ) assert res.status_code == 400 body = res.json() assert \"ok\" not", "\"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM authorization \"\"\"", "assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url,", "= requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code", "payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return 
get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\"", "assert res.status_code == 403 body = res.json() assert \"ok\" not in body assert", "payment_api_url, iam_auth): \"\"\" Test /backend/validate without a payment token \"\"\" card_number = \"1234567890123456\"", "body = res.json() assert \"ok\" not in body assert \"message\" in body #", "assert \"ok\" not in body assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={", "= str(<KEY>()) total = 3000 # Validate the token res = requests.post( payment_api_url+\"/backend/validate\",", "payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\":", "\"message\" in body assert \"paymentToken\" in body[\"message\"] # Cleanup cancelPayment requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\":", "/backend/validate without a payment token \"\"\" card_number = \"1234567890123456\" total = 3000 #", "# Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total", "3000 # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token,", "the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert", "in body assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token })", "body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url):", "in body assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", 
json={ \"paymentToken\": payment_token })", "token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total } ) assert", "def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a smaller total \"\"\" card_number", "json={ \"paymentToken\": payment_token, \"total\": total+2000 } ) assert res.status_code == 200 body =", "token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"]", "json={ \"paymentToken\": payment_token } ) assert res.status_code == 400 body = res.json() assert", "@pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url,", "pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def", "}) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a higher total \"\"\"", "token \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment token", "\"ok\" in body assert \"message\" not in body assert body[\"ok\"] == False def", "payment_api_url): \"\"\" Test /backend/validate without IAM authorization \"\"\" card_number = \"1234567890123456\" total =", "json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without a", "body assert 
\"paymentToken\" in body[\"message\"] # Cleanup cancelPayment requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token })", "iam_auth): \"\"\" Test /backend/validate with a smaller total \"\"\" card_number = \"1234567890123456\" total", "disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url,", "# Validate the token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total", "True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\"", "} ) assert res.status_code == 200 body = res.json() assert \"ok\" in body", "body = res.json() assert \"ok\" in body assert \"message\" not in body assert", "total } ) assert res.status_code == 400 body = res.json() assert \"ok\" not", "authorization \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment token", "str(<KEY>()) total = 3000 # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url),", "\"message\" not in body assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\":", "\"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 403 body = res.json()", "uuid import pytest import requests from fixtures import iam_auth # pylint: disable=import-error from", "auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert res.status_code == 400 body = res.json()", "payment_token, \"total\": total } ) assert res.status_code == 403 body = 
res.json() assert", "assert \"ok\" not in body assert \"message\" in body assert \"total\" in body[\"message\"]", "total } ) assert res.status_code == 403 body = res.json() assert \"ok\" not", "res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert res.status_code ==", "# Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test", "res.status_code == 400 body = res.json() assert \"ok\" not in body assert \"message\"", "\"\"\" Test /backend/validate without an total \"\"\" card_number = \"1234567890123456\" total = 3000", "payment_token, \"total\": total-1000 } ) assert res.status_code == 200 body = res.json() assert", "iam_auth): \"\"\" Test /backend/validate with a higher total \"\"\" card_number = \"1234567890123456\" total", "payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={", "payment_api_url, iam_auth): \"\"\" Test /backend/validate with a non-existent token \"\"\" payment_token = str(<KEY>())", "\"ok\" not in body assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\":", "# Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test", "card_number = \"1234567890123456\" total = 3000 # Create a payment token res_3p =", "json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without an", "assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def 
test_backend_validate_no_iam(payment_3p_api_url,", "assert \"message\" in body assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\":", "= \"1234567890123456\" total = 3000 # Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\",", "with a smaller total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create", "assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url,", "# pylint: disable=import-error from helpers import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url():", "res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token,", "token \"\"\" payment_token = str(<KEY>()) total = 3000 # Validate the token res", "\"paymentToken\": payment_token, \"total\": total+2000 } ) assert res.status_code == 200 body = res.json()", "payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 200", "json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM authorization", "payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without an total \"\"\"", "json={ \"cardNumber\": card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token", "token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert res.status_code", "# Validate the token res = requests.post( 
payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\":", "assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url,", "json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a", "json={ \"paymentToken\": payment_token, \"total\": total-1000 } ) assert res.status_code == 200 body =", "Test /backend/validate with a higher total \"\"\" card_number = \"1234567890123456\" total = 3000", "token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000 } )", "in body assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token })", "requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000 } ) assert res.status_code ==", "not in body assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token", "\"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a non-existent", "\"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without an total", "payment_token, \"total\": total } ) assert res.status_code == 200 body = res.json() assert", "\"\"\" Test /backend/validate without a payment token \"\"\" card_number = \"1234567890123456\" total =", "higher total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment", "not in body assert 
\"message\" in body assert \"total\" in body[\"message\"] # Cleanup", "\"cardNumber\": card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res", "test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without a payment token \"\"\" card_number =", "def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without an total \"\"\" card_number =", "a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total }) payment_token", "\"1234567890123456\" total = 3000 # Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={", "3000 # Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\":", "Test /backend/validate without IAM authorization \"\"\" card_number = \"1234567890123456\" total = 3000 #", "Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate", "import requests from fixtures import iam_auth # pylint: disable=import-error from helpers import get_parameter", "= res.json() assert \"ok\" not in body assert \"message\" in body assert \"total\"", "\"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a", "test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a smaller total \"\"\" card_number =", "test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without an total \"\"\" card_number = \"1234567890123456\"", "in body assert \"paymentToken\" in body[\"message\"] # Cleanup cancelPayment 
requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token", "not in body assert \"message\" in body assert \"paymentToken\" in body[\"message\"] # Cleanup", "}) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url),", "json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a", "import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url():", "def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM authorization \"\"\" card_number = \"1234567890123456\"", "Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000", "\"\"\" Test /backend/validate with a non-existent token \"\"\" payment_token = str(<KEY>()) total =", "False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test", "the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total }", "requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM", "Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total", "res.status_code 
== 200 body = res.json() assert \"ok\" in body assert \"message\" not", "\"ok\" not in body assert \"message\" in body assert \"paymentToken\" in body[\"message\"] #", "assert \"message\" not in body assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={", "payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number =", "payment_api_url, iam_auth): \"\"\" Test /backend/validate without an total \"\"\" card_number = \"1234567890123456\" total", "== False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\"", "total } ) assert res.status_code == 200 body = res.json() assert \"ok\" in", "payment_token = str(<KEY>()) total = 3000 # Validate the token res = requests.post(", "res.json() assert \"ok\" not in body assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\",", "\"paymentToken\": payment_token, \"total\": total-1000 } ) assert res.status_code == 200 body = res.json()", "body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with", "in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth):", "assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a", "Validate the token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total }", "\"ok\" not in body assert \"message\" in body assert \"total\" in body[\"message\"] #", "\"ok\" in 
body assert \"message\" not in body assert body[\"ok\"] == True #", "in body assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token })", "res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] #", "\"message\" not in body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\"", "== 200 body = res.json() assert \"ok\" in body assert \"message\" not in", "total+2000 } ) assert res.status_code == 200 body = res.json() assert \"ok\" in", "payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without a payment token", "== True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth):", "auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 200 body", "return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\"", "}) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without an total \"\"\" card_number", "in body assert \"message\" not in body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url,", "\"paymentToken\": payment_token } ) assert res.status_code == 400 body = res.json() assert \"ok\"", "assert \"message\" not in body assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={", "}) def test_backend_validate_no_payment_token(payment_3p_api_url, 
payment_api_url, iam_auth): \"\"\" Test /backend/validate without a payment token \"\"\"", "res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total } ) assert", "res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code", "/backend/validate with a higher total \"\"\" card_number = \"1234567890123456\" total = 3000 #", "from helpers import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\")", "total = 3000 # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={", "body assert \"message\" in body assert \"paymentToken\" in body[\"message\"] # Cleanup cancelPayment requests.post(payment_3p_api_url+\"/cancelPayment\",", "\"total\": total+2000 } ) assert res.status_code == 200 body = res.json() assert \"ok\"", "Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate", "test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM authorization \"\"\" card_number = \"1234567890123456\" total", "in body assert \"message\" in body assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\",", "\"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a higher", "\"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post(", "without IAM authorization \"\"\" 
card_number = \"1234567890123456\" total = 3000 # Create a", "}) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a non-existent token \"\"\"", "body assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def", "# Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total }", "pylint: disable=import-error from helpers import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return", "== False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a smaller total", "\"ok\" in body assert \"message\" not in body assert body[\"ok\"] == False #", ") assert res.status_code == 403 body = res.json() assert \"ok\" not in body", "requests from fixtures import iam_auth # pylint: disable=import-error from helpers import get_parameter #", "the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 }", "assert res.status_code == 200 body = res.json() assert \"ok\" in body assert \"message\"", "assert \"ok\" in body assert \"message\" not in body assert body[\"ok\"] == False", "body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\"", "== True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth):", "/backend/validate \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment token", "body[\"ok\"] == True # Cleanup 
requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url,", "with a higher total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create", "a non-existent token \"\"\" payment_token = str(<KEY>()) total = 3000 # Validate the", "fixtures import iam_auth # pylint: disable=import-error from helpers import get_parameter # pylint: disable=import-error,no-name-in-module", "body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url,", "res.json() assert \"ok\" in body assert \"message\" not in body assert body[\"ok\"] ==", "res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\":", "body assert \"message\" not in body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url,", "get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate", "iam_auth): \"\"\" Test /backend/validate without an total \"\"\" card_number = \"1234567890123456\" total =", "Test /backend/validate \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment", "res.json() assert \"ok\" not in body assert \"message\" in body assert \"total\" in", "= requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate", "Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def 
test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate", "\"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 200 body = res.json()", "<filename>payment/tests/integ/test_api.py import uuid import pytest import requests from fixtures import iam_auth # pylint:", "= requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000 } ) assert res.status_code", "in body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate", "Test /backend/validate without an total \"\"\" card_number = \"1234567890123456\" total = 3000 #", "= res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\":", "requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert res.status_code == 400 body", "res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token", "\"total\": total-1000 } ) assert res.status_code == 200 body = res.json() assert \"ok\"", "requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate the", "the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000 }", "def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\" total =", "\"\"\" payment_token = str(<KEY>()) total = 3000 # Validate the token res =", "without an total \"\"\" 
card_number = \"1234567890123456\" total = 3000 # Create a", "token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert res.status_code", "payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total }) payment_token =", "True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\"", "test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\" total = 3000", "= 3000 # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\":", "200 body = res.json() assert \"ok\" in body assert \"message\" not in body", "body assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def", "body assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def", "def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number", "body assert body[\"ok\"] == True # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def", "} ) assert res.status_code == 403 body = res.json() assert \"ok\" not in", "test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a non-existent token \"\"\" payment_token =", "payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a non-existent 
token", "/backend/validate with a non-existent token \"\"\" payment_token = str(<KEY>()) total = 3000 #", "body = res.json() assert \"ok\" not in body assert \"message\" in body assert", "auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 } ) assert res.status_code == 200 body", "payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 } ) assert res.status_code == 200", "\"\"\" Test /backend/validate with a smaller total \"\"\" card_number = \"1234567890123456\" total =", "payment_api_url, iam_auth): \"\"\" Test /backend/validate with a smaller total \"\"\" card_number = \"1234567890123456\"", "requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 403", "token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 } )", "get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\" total", "# Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token }", "the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert", "\"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without a payment", "= requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } ) assert res.status_code == 400", "payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": 
payment_token, \"total\": total+2000 } ) assert res.status_code == 200", "iam_auth): \"\"\" Test /backend/validate without a payment token \"\"\" card_number = \"1234567890123456\" total", "body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\"", "body assert \"message\" in body assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={", "Test /backend/validate without a payment token \"\"\" card_number = \"1234567890123456\" total = 3000", "\"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment token res_3p", "= requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code ==", "# Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test", "Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token } )", "auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000 } ) assert res.status_code == 200 body", "Test /backend/validate with a non-existent token \"\"\" payment_token = str(<KEY>()) total = 3000", "token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total } )", "= requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert res.status_code == 400", "disable=import-error from helpers import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return 
get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\")", "res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total", "assert \"ok\" in body assert \"message\" not in body assert body[\"ok\"] == True", "get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return", "import pytest import requests from fixtures import iam_auth # pylint: disable=import-error from helpers", "assert \"ok\" not in body assert \"message\" in body assert \"paymentToken\" in body[\"message\"]", "@pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\"", "def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a higher total \"\"\" card_number", "= res.json() assert \"ok\" not in body assert \"message\" in body # Cleanup", "body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a smaller", "payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert res.status_code == 400 body =", "iam_auth # pylint: disable=import-error from helpers import get_parameter # pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def", "payment_api_url, iam_auth): \"\"\" Test /backend/validate \"\"\" card_number = \"1234567890123456\" total = 3000 #", "json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 200 body =", "False def 
test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a smaller total \"\"\"", "res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert res.status_code ==", "}) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", json={", "payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 403 body", "res.json() assert \"ok\" not in body assert \"message\" in body assert \"paymentToken\" in", "\"message\" not in body assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\":", "= res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token,", "/backend/validate with a smaller total \"\"\" card_number = \"1234567890123456\" total = 3000 #", "Test /backend/validate with a smaller total \"\"\" card_number = \"1234567890123456\" total = 3000", "} ) assert res.status_code == 400 body = res.json() assert \"ok\" not in", "iam_auth): \"\"\" Test /backend/validate with a non-existent token \"\"\" payment_token = str(<KEY>()) total", "== 400 body = res.json() assert \"ok\" not in body assert \"message\" in", "from fixtures import iam_auth # pylint: disable=import-error from helpers import get_parameter # pylint:", "== 403 body = res.json() assert \"ok\" not in body assert \"message\" in", "res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000 } ) assert", "payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate without IAM authorization \"\"\" card_number", "an total \"\"\" card_number = 
\"1234567890123456\" total = 3000 # Create a payment", ") assert res.status_code == 400 body = res.json() assert \"ok\" not in body", "test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a higher total \"\"\" card_number =", "without a payment token \"\"\" card_number = \"1234567890123456\" total = 3000 # Create", "payment token \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment", "payment_api_url, iam_auth): \"\"\" Test /backend/validate with a higher total \"\"\" card_number = \"1234567890123456\"", "return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment/api/url\") def test_backend_validate(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test", "Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } )", "not in body assert body[\"ok\"] == False # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token", "Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number, \"amount\": total })", "requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code ==", "400 body = res.json() assert \"ok\" not in body assert \"message\" in body", "assert res.status_code == 400 body = res.json() assert \"ok\" not in body assert", "/backend/validate without an total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create", "requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert res.status_code == 400 body", "helpers import get_parameter # pylint: disable=import-error,no-name-in-module 
@pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def", "in body assert \"message\" not in body assert body[\"ok\"] == True # Cleanup", "smaller total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment", "def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without a payment token \"\"\" card_number", "\"total\": total } ) assert res.status_code == 200 body = res.json() assert \"ok\"", "res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 } ) assert", "not in body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test", "total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment token", "requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without", "auth=iam_auth(payment_api_url), json={ \"total\": total } ) assert res.status_code == 400 body = res.json()", "= res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\":", "# Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_iam(payment_3p_api_url, payment_api_url): \"\"\" Test /backend/validate", "IAM authorization \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a payment", "in body assert \"message\" in body assert \"paymentToken\" in body[\"message\"] # Cleanup cancelPayment", "\"message\" in body # Cleanup 
requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url,", "in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url, iam_auth):", "\"total\": total } ) assert res.status_code == 400 body = res.json() assert \"ok\"", "assert \"message\" not in body assert body[\"ok\"] == False def test_backend_validate_smaller_total(payment_3p_api_url, payment_api_url, iam_auth):", "= 3000 # Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\": card_number,", "Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate", "payment_token, \"total\": total+2000 } ) assert res.status_code == 200 body = res.json() assert", "requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with", "non-existent token \"\"\" payment_token = str(<KEY>()) total = 3000 # Validate the token", "total }) payment_token = res_3p.json()[\"paymentToken\"] # Validate the token res = requests.post( payment_api_url+\"/backend/validate\",", "= res.json() assert \"ok\" not in body assert \"message\" in body assert \"paymentToken\"", "total = 3000 # Create a payment token res_3p = requests.post(payment_3p_api_url+\"/preauth\", json={ \"cardNumber\":", "# pylint: disable=import-error,no-name-in-module @pytest.fixture(scope=\"module\") def payment_3p_api_url(): return get_parameter(\"/ecommerce/{Environment}/payment-3p/api/url\") @pytest.fixture(scope=\"module\") def payment_api_url(): return 
get_parameter(\"/ecommerce/{Environment}/payment/api/url\")", "json={ \"total\": total } ) assert res.status_code == 400 body = res.json() assert", "the token res = requests.post( payment_api_url+\"/backend/validate\", json={ \"paymentToken\": payment_token, \"total\": total } )", "\"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_payment_token(payment_3p_api_url, payment_api_url,", "requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total-1000 } ) assert res.status_code ==", "not in body assert \"message\" in body # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token", "requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_higher_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with", "def test_backend_validate_non_existent(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate with a non-existent token \"\"\" payment_token", "a smaller total \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a", "\"\"\" Test /backend/validate without IAM authorization \"\"\" card_number = \"1234567890123456\" total = 3000", "= res.json() assert \"ok\" in body assert \"message\" not in body assert body[\"ok\"]", "\"message\" in body assert \"total\" in body[\"message\"] # Cleanup requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token", "requests.post(payment_3p_api_url+\"/cancelPayment\", json={ \"paymentToken\": payment_token }) def test_backend_validate_no_total(payment_3p_api_url, payment_api_url, iam_auth): \"\"\" Test /backend/validate without", "import uuid import pytest import requests from fixtures import iam_auth # pylint: disable=import-error", "Validate 
the token res = requests.post( payment_api_url+\"/backend/validate\", auth=iam_auth(payment_api_url), json={ \"paymentToken\": payment_token, \"total\": total+2000", ") assert res.status_code == 200 body = res.json() assert \"ok\" in body assert", "json={ \"paymentToken\": payment_token, \"total\": total } ) assert res.status_code == 403 body =", "res.status_code == 403 body = res.json() assert \"ok\" not in body assert \"message\"", "a payment token \"\"\" card_number = \"1234567890123456\" total = 3000 # Create a" ]
[ "test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2, 3]", "import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir", "test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells =", "cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0])", "assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2,", "self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0", "3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert", "get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells", "import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution,", "self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert", "test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) 
cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) ==", "is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single", "test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert", "create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model", "is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells =", "cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0])", "..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir =", "is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells =", "cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert", "get_choices(cells[0]) for i in [1, 2, 3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\")", "cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def", 
"self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self):", "for i in [1, 2, 3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert", "create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self):", "clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice) assert not has_solution(cells[0])", "3] assert len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0]) for i in [1,", "def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice)", "len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0]) for i in [1, 2, 3]])", "self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3", "in get_choices(cells[0]) for i in [1, 2, 3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single", "Choice\") assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def", "None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice)", "[1, 2, 3] assert len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0]) for i", "== 3 assert all([i in get_choices(cells[0]) for i in [1, 2, 3]]) def", "[1, 2, 3]]) def 
test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None", "== 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice) assert not has_solution(cells[0]) def tearDown(self): self.tmp_dir.cleanup()", "0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0])) == 3 assert all([i in", "in [1, 2, 3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is", "Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice", "= create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def", "== 0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0])) == 3 assert all([i", "def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells", "i in [1, 2, 3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0])", "nbformat import unittest from e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice,", "Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self):", "cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice", 
"setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice =", "PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice)", "is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1,", "== 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0])", "cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells", "2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice) assert", "3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells =", "assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells", "2, 3]]) def test_get_num_of_choices(self): cells = self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells", "2, 3] assert len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0]) for i in", "assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice)", "e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, 
is_singlechoice, get_choices, get_num_of_choices, clear_choices,", "from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from", ") from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course()", "= [1, 2, 3] assert len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0]) for", "TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir)", "cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0]))", "self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells", "clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir", "= self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) ==", "from e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices,", "self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0])) ==", "assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice) assert not 
has_solution(cells[0]) def", "3 assert all([i in get_choices(cells[0]) for i in [1, 2, 3]]) def test_get_num_of_choices(self):", "= self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self):", "cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0])", "def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice", "= PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells =", "get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir,", "import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import", "<filename>e2xgrader/tests/utils/test_extra_cells.py import nbformat import unittest from e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import (", "tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple", "( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course", "def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2,", "all([i in get_choices(cells[0]) for i in [1, 2, 3]]) def test_get_num_of_choices(self): cells =", "test_multiplechoice_cell(self): cells = 
self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert", "assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0])) == 3", "import nbformat import unittest from e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell,", "def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\")", "self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells", "e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils", "tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell())", "test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert", "self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self):", "= [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells =", "3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice) assert not", "assert all([i in get_choices(cells[0]) for i in [1, 2, 3]]) def test_get_num_of_choices(self): 
cells", "get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3]", "is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0])", "assert len(get_choices(cells[0])) == 3 assert all([i in get_choices(cells[0]) for i in [1, 2,", "assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self):", "class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model =", "\"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def", "coursedir = create_temp_course() self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\"", "import unittest from e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice,", "PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, )", "len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0])) == 3 assert", "assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice =", "len(get_choices(cells[0])) == 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice) assert not 
has_solution(cells[0]) def tearDown(self):", "= self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice =", "has_solution, ) from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir =", "unittest from e2xgrader.models import PresetModel from e2xgrader.utils.extra_cells import ( is_extra_cell, is_multiplechoice, is_singlechoice, get_choices,", "is_extra_cell, is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course class", "= \"Multiple Choice\" def test_extra_cell(self): assert not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0])", "not is_extra_cell(nbformat.v4.new_code_cell()) cells = self.model.get_question_preset(self.multiplechoice) assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert", "= self.model.get_question_preset(self.multiplechoice) assert len(get_choices(cells[0])) == 0 cells[0].metadata.extended_cell.choice = [1, 2, 3] assert len(get_choices(cells[0]))", "is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def", "= self.model.get_question_preset(\"Single Choice\") assert get_num_of_choices(cells[0]) is None cells = self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) ==", "= self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def", "def test_clear_choices(self): cells = 
self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0]))", "= tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert not", "def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def test_get_choices(self): cells = self.model.get_question_preset(self.multiplechoice)", "get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self):", "self.tmp_dir = tmp_dir self.model = PresetModel(coursedir) self.multiplechoice = \"Multiple Choice\" def test_extra_cell(self): assert", "= self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def test_singlechoice_cell(self): cells = self.model.get_question_preset(\"Single Choice\") assert is_singlechoice(cells[0]) def", "cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0", "from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase): def setUp(self): tmp_dir, coursedir = create_temp_course() self.tmp_dir", "self.model.get_question_preset(self.multiplechoice) assert get_num_of_choices(cells[0]) == 3 def test_clear_choices(self): cells = self.model.get_question_preset(self.multiplechoice) cells[0].metadata.extended_cell.choice = [1,", "is_multiplechoice, is_singlechoice, get_choices, get_num_of_choices, clear_choices, has_solution, ) from ..test_utils.test_utils import create_temp_course class TestExtraCells(unittest.TestCase):", "assert is_extra_cell(cells[0]) def test_multiplechoice_cell(self): cells = self.model.get_question_preset(self.multiplechoice) assert is_multiplechoice(cells[0]) def 
test_singlechoice_cell(self): cells =", "[1, 2, 3] clear_choices(cells[0]) assert len(get_choices(cells[0])) == 0 def test_has_solution(self): cells = self.model.get_question_preset(self.multiplechoice)" ]